diff --git a/.gitignore b/.gitignore index b4bca2c2..6e11a0b9 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,8 @@ obj/ *.log TestResults/ -.dotnet \ No newline at end of file +.dotnet +.DS_Store +seed-data/ics-cisa/*.csv +seed-data/ics-cisa/*.xlsx +seed-data/ics-cisa/*.sha256 diff --git a/SPRINTS.md b/SPRINTS.md index ca623a94..088c7890 100644 --- a/SPRINTS.md +++ b/SPRINTS.md @@ -15,6 +15,7 @@ | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Feedser operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding
Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through
`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHCORE-BUILD-OPENIDDICT / AUTHCORE-STORAGE-DEVICE-TOKENS / AUTHCORE-BOOTSTRAP-INVITES | Address remaining Authority compile blockers (OpenIddict transaction shim, token device document, bootstrap invite cleanup) so `dotnet build src/StellaOps.Authority.sln` returns success. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish
Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-001 | Fetch pipeline & state tracking
Summary planner now drives monthly/yearly VINCE fetches, persists pending summaries/notes, and hydrates VINCE detail queue with telemetry.
Team instructions: Read ./AGENTS.md and src/StellaOps.Feedser.Source.CertCc/AGENTS.md. Coordinate daily with Models/Merge leads so new normalizedVersions output and provenance tags stay aligned with ./src/FASTER_MODELING_AND_NORMALIZATION.md. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher
Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. | @@ -23,7 +24,7 @@ | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests
Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation
`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Feedser.Source.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation
Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | BLOCKED (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff
Upstream repo version lacks SemVer primitives + provenance decision reason fields, so snapshot regeneration fails; resume once Models/Storage sprint lands those changes. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff
Fixtures regenerated with normalized ranges + provenance fields on 2025-10-11; QA handoff notes published and merge backfill unblocked. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up
Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan
Staged reintegration plan published in `src/StellaOps.Feedser.Source.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.Source.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation
Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. | @@ -48,16 +49,20 @@ | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Feedser.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption
Deployment docs and CLI notes explain the LIB5 resilience knobs for rollout.
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Feedser.WebService/AGENTS.md. These items were mid-flight; resume implementation ensuring docs/operators receive timely updates. | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). | | Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCRYPTO-ENGINE-01-001 | SEC3.A — Shared metadata resolver confirmed via host test run; SEC3.B now unblocked for tuning guidance (outline captured in `docs/dev/authority-rate-limit-tuning-outline.md`). | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DOING (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Docs guild resuming diagram/copy updates using the captured limiter context + configuration notes (reference `docs/dev/authority-rate-limit-tuning-outline.md` for tuning matrix + observability copy).
Instructions to work:
Read ./AGENTS.md plus module-specific AGENTS. Restart the blocked rate-limiter workstream (Authority host + cryptography) so the plugin docs team can finish diagrams. Coordinate daily; use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md where rate limiting interacts with conflict policy. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Normalization/TASKS.md | — | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
Instructions to work:
Read ./AGENTS.md and module AGENTS. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md to build the shared rule generator; sync daily with storage and connector owners. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | — | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Instructions to work:
Read ./AGENTS.md and storage AGENTS. Implement dual-write/backfill and index creation using the shapes from ./src/FASTER_MODELING_AND_NORMALIZATION.md; coordinate with connectors entering the sprint. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-13) | Team Authority Platform & Security Guild | AUTHSEC-DOCS-01-002 | SEC3.B — Published `docs/security/rate-limits.md` with tuning matrix, alert thresholds, and lockout interplay guidance; Docs guild can lift copy into plugin guide. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHSEC-CRYPTO-02-001 | SEC5.B1 — Introduce libsodium signing provider and parity tests to unblock CLI verification enhancements. | +| Sprint 1 | Bootstrap & Replay Hardening | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Security Guild | AUTHSEC-CRYPTO-02-004 | SEC5.D/E — Finish bootstrap invite lifecycle (API/store/cleanup) and token device heuristics; build currently red due to pending handler integration. | +| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | AUTHCLI-DIAG-01-001 | Surface password policy diagnostics in CLI startup/output so operators see weakened overrides immediately. | +| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Developer guide copy + diagrams merged 2025-10-11; limiter guidance incorporated and handed to Docs guild for asset export. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Normalization/TASKS.md | DONE (2025-10-12) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
`SemVerRangeRuleBuilder` shipped 2025-10-12 with comparator/`||` support and fixtures aligning to `FASTER_MODELING_AND_NORMALIZATION.md`. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Indexes seeded + docs updated 2025-10-11 to cover flattened normalized rules for connector adoption. | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Merge/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDMERGE-ENGINE-02-002 | Normalized versions union & dedupe
Affected package resolver unions/dedupes normalized rules, stamps merge provenance with `decisionReason`, and tests cover the rollout. | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | TODO | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads | @@ -65,17 +70,17 @@ | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment | | Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow
Resolved 2025-10-12: OSV mapper now derives canonical PURLs for Go + scoped npm packages when raw payloads omit `purl`; conflict fixtures unchanged for invalid npm names. Verified via `dotnet test src/StellaOps.Feedser.Source.Osv.Tests`, `src/StellaOps.Feedser.Source.Ghsa.Tests`, `src/StellaOps.Feedser.Source.Nvd.Tests`, and backbone normalization/storage suites. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Implementation DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch pipeline, DTO parser, canonical mapper, fixtures, and README shipped 2025-10-12; downstream export integration still pending future tasks. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cccs/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-007 | Atom feed verified 2025-10-11, history/caching review and FR locale enumeration pending. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.CertBund/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-007 | BSI RSS directory confirmed CERT-Bund feed 2025-10-11, history assessment pending. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kisa/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | KNVD RSS endpoint identified 2025-10-11, access headers/session strategy outstanding. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md | Build DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | TLS bundle + connectors landed 2025-10-12; fetch/parse/map flow emits advisories, fixtures & telemetry follow-up pending. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md | Build DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | JSON bulletin fetch + canonical mapping live 2025-10-12; regression fixtures added but blocked on Mongo2Go libcrypto dependency for test execution. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-008 | new ICS RSS endpoint logged 2025-10-11 but Akamai blocks direct pulls, fallback strategy task opened. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | openVuln API + RSS reviewed 2025-10-11, auth/pagination memo pending. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md | Research DOING | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-007 | MSRC API docs reviewed 2025-10-11, auth/throttling comparison memo pending.
Instructions to work:
Read ./AGENTS.md plus each module's AGENTS file. Parallelize research, ingestion, mapping, fixtures, and docs using the normalized rule shape from ./src/FASTER_MODELING_AND_NORMALIZATION.md. Coordinate daily with the merge coordination task from Sprint 1. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cve/TASKS.md | — | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | Instructions to work:
Read ./AGENTS.md and module AGENTS. Deliver operator docs and monitoring instrumentation required for broader feed rollout. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kev/TASKS.md | — | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | Instructions to work:
Read ./AGENTS.md and module AGENTS. Deliver operator docs and monitoring instrumentation required for broader feed rollout. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Acsc/TASKS.md | Implementation DONE (2025-10-12) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch→parse→map pipeline, fixtures, diagnostics, and README finished 2025-10-12; awaiting downstream export follow-ups tracked separately. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cccs/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-008 | Observability meter, historical harvest plan, and DOM sanitizer refinements wrapped; ops notes live under `docs/ops/feedser-cccs-operations.md` with fixtures validating EN/FR list handling. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.CertBund/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-008 | Telemetry/docs (02-006) and history/locale sweep (02-007) completed alongside pipeline; runbook `docs/ops/feedser-certbund-operations.md` captures locale guidance and offline packaging. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kisa/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | Connector, tests, and telemetry/docs (02-006) finalized; localisation notes in `docs/dev/kisa_connector_notes.md` complete rollout. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | Fetch/parser/mapper refinements, regression fixtures, telemetry/docs, access options, and trusted root packaging all landed; README documents offline access strategy. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md | DONE (2025-10-13) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | Listing fetch, parser, mapper, fixtures, telemetry/docs, and archive plan finished; Mongo2Go/libcrypto dependency resolved via bundled OpenSSL noted in ops guide. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-011 | Feed parser attachment fixes, SemVer exact values, regression suites, telemetry/docs updates, and handover complete; ops runbook now details attachment verification + proxy usage. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md | Implementation DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | OAuth fetch pipeline, DTO/mapping, tests, and telemetry/docs shipped; monitoring enablement now tracked via follow-up ops tasks (02-006+). | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-008 | Azure AD onboarding (02-008) unblocked fetch/parse/map pipeline; fixtures, telemetry/docs, and Offline Kit guidance published in `docs/ops/feedser-msrc-operations.md`. 
| +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Cve/TASKS.md | DONE (2025-10-15) | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | CVE data-source selection, fetch pipeline, and docs landed 2025-10-10. 2025-10-15: smoke verified using the seeded mirror fallback; connector now logs a warning and pulls from `seed-data/cve/` until live CVE Services credentials arrive. | +| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Feedser.Source.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | KEV catalog ingestion, fixtures, telemetry, and schema validation completed 2025-10-12; ops dashboard published. | | Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-01-001 | Canonical schema docs refresh
Updated canonical schema + provenance guides with SemVer style, normalized version rules, decision reason change log, and migration notes. | | Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-001 | Feedser-SemVer Playbook
Published merge playbook covering mapper patterns, dedupe flow, indexes, and rollout checklist. | | Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-002 | Normalized versions query guide
Delivered Mongo index/query addendum with `$unwind` recipes, dedupe checks, and operational checklist.
Instructions to work:
DONE Read ./AGENTS.md and docs/AGENTS.md. Document every schema/index/query change produced in Sprint 1-2 leveraging ./src/FASTER_MODELING_AND_NORMALIZATION.md. | @@ -92,4 +97,4 @@ | Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures | | Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Feedser.Source.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures
Instructions to work:
Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. | | Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Feedser Conflict Rules
Runbook published at `docs/ops/feedser-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. | -| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | TODO | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout
Instructions to work:
Read ./AGENTS.md and docs/AGENTS.md. Once GHSA/NVD/OSV regression fixtures (FEEDCONN-GHSA-04-002, FEEDCONN-NVD-04-002, FEEDCONN-OSV-04-002) are delivered, schedule the Ops review, apply the alert thresholds captured in `docs/ops/feedser-authority-audit-runbook.md`, and record change-log linkage after sign-off. Use ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md for ongoing rule references. | +| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-16) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout
Ops review completed, alert thresholds applied, and change log appended in `docs/ops/feedser-conflict-resolution.md`; task closed after connector signals verified. | diff --git a/docs/11_AUTHORITY.md b/docs/11_AUTHORITY.md index faa6d946..3fad4471 100644 --- a/docs/11_AUTHORITY.md +++ b/docs/11_AUTHORITY.md @@ -67,8 +67,9 @@ Authority centralises revocation in `authority_revocations` with deterministic c **Export surfaces** (deterministic output, suitable for Offline Kit): - CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`. +- Verification: `stella auth revoke verify --bundle --signature --key ` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`). - API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload. -- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys. +- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider). **Consumer guidance:** diff --git a/docs/24_OFFLINE_KIT.md b/docs/24_OFFLINE_KIT.md index 156beb90..dbfbe283 100755 --- a/docs/24_OFFLINE_KIT.md +++ b/docs/24_OFFLINE_KIT.md @@ -10,32 +10,50 @@ The **Offline Update Kit** packages everything Stella Ops needs to run on a completely isolated network: -| Component | Contents | -|-----------|----------| -| **Merged vulnerability feeds** | OSV, GHSA plus optional NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU | -| **Container images** | `stella-ops`, *Zastava* sidecar (x86‑64 & arm64) | -| **Provenance** | Cosign signature, SPDX 2.3 SBOM, in‑toto SLSA attestation | -| **Delta patches** | Daily diff bundles keep size \< 350 MB | - -*Scanner core:* C# 12 on **.NET {{ dotnet }}**. -*Imports are idempotent and atomic — no service downtime.* +| Component | Contents | +|-----------|----------| +| **Merged vulnerability feeds** | OSV, GHSA plus optional NVD 2.0, CNNVD, CNVD, ENISA, JVN and BDU | +| **Container images** | `stella-ops`, *Zastava* sidecar (x86‑64 & arm64) | +| **Provenance** | Cosign signature, SPDX 2.3 SBOM, in‑toto SLSA attestation | +| **Attested manifest** | `offline-manifest.json` + detached JWS covering bundle metadata, signed during export. | +| **Delta patches** | Daily diff bundles keep size \< 350 MB | + +**RU BDU note:** ship the official Russian Trusted Root/Sub CA bundle (`certificates/russian_trusted_bundle.pem`) inside the kit so `feedser:httpClients:source.bdu:trustedRootPaths` can resolve it when the service runs in an air‑gapped network. Drop the most recent `vulxml.zip` alongside the kit if operators need a cold-start cache. + +*Scanner core:* C# 12 on **.NET {{ dotnet }}**. 
+*Imports are idempotent and atomic — no service downtime.* --- ## 1 · Download & verify -```bash -curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-.tgz -curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-.tgz.sig - -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature stella-ops-offline-kit-.tgz.sig \ - stella-ops-offline-kit-.tgz +```bash +curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-.tgz +curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-.tgz.sig +curl -LO https://get.stella-ops.org/ouk/offline-manifest-.json +curl -LO https://get.stella-ops.org/ouk/offline-manifest-.json.jws + +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature stella-ops-offline-kit-.tgz.sig \ + stella-ops-offline-kit-.tgz ```` -Verification prints **OK** and the SHA‑256 digest; cross‑check against the -[changelog](https://git.stella-ops.org/stella-ops/offline-kit/-/releases). +Verification prints **OK** and the SHA‑256 digest; cross‑check against the +[changelog](https://git.stella-ops.org/stella-ops/offline-kit/-/releases). + +Validate the attested manifest before distribution: + +```bash +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature offline-manifest-.json.jws \ + offline-manifest-.json + +jq '.artifacts[] | {name, sha256, size, capturedAt}' offline-manifest-.json +``` + +The manifest enumerates every artefact (`name`, `sha256`, `size`, `capturedAt`) and is signed with the same key registry as Authority revocation bundles. Operators can ship the manifest alongside the tarball so downstream mirrors can re-verify without unpacking the kit. --- diff --git a/docs/README.md b/docs/README.md index 653c9b7b..b10929c1 100755 --- a/docs/README.md +++ b/docs/README.md @@ -58,11 +58,15 @@ Everything here is open‑source and versioned — when you check out a git ta - **22 – [CI/CD Recipes Library](ci/20_CI_RECIPES.md)** - **23 – [FAQ](23_FAQ_MATRIX.md)** - **24 – [Offline Update Kit Admin Guide](24_OUK_ADMIN_GUIDE.md)** -- **26 – [Authority Key Rotation Playbook](ops/authority-key-rotation.md)** - **25 – [Feedser Apple Connector Operations](ops/feedser-apple-operations.md)** - -### Legal & licence -- **29 – [Legal & Quota FAQ](29_LEGAL_FAQ_QUOTA.md)** +- **26 – [Authority Key Rotation Playbook](ops/authority-key-rotation.md)** +- **27 – [Feedser CCCS Connector Operations](ops/feedser-cccs-operations.md)** +- **28 – [Feedser CISA ICS Connector Operations](ops/feedser-icscisa-operations.md)** +- **29 – [Feedser CERT-Bund Connector Operations](ops/feedser-certbund-operations.md)** +- **30 – [Feedser MSRC Connector – AAD Onboarding](ops/feedser-msrc-operations.md)** + +### Legal & licence +- **31 – [Legal & Quota FAQ](29_LEGAL_FAQ_QUOTA.md)** diff --git a/docs/TASKS.md b/docs/TASKS.md index b3e8b7a7..6291fe92 100644 --- a/docs/TASKS.md +++ b/docs/TASKS.md @@ -7,6 +7,8 @@ | DOC3.Feedser-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Feedser authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. | | DOC5.Feedser-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Feedser-Authority | Produce dedicated Feedser authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. 
| | FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Feedser conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/feedser-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Feedser team. | -| FEEDDOCS-DOCS-05-002 | TODO | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Capture ops sign-off: circulate conflict runbook, tune alert thresholds, and document rollout decisions in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/feedser-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. | +| FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Feedser Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/feedser-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. | > Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`. + +> Remark (2025-10-13, DOC4.AUTH-PDG): Rate limit guide published (`docs/security/rate-limits.md`) and handed to plugin docs team for diagram uplift once PLG6.DIAGRAM lands. diff --git a/docs/artifacts/icscisa/20251014-sample-feed.xml b/docs/artifacts/icscisa/20251014-sample-feed.xml new file mode 100644 index 00000000..3bafb47a --- /dev/null +++ b/docs/artifacts/icscisa/20251014-sample-feed.xml @@ -0,0 +1,27 @@ + + + + CISA ICS Advisories + + ICSA-25-123-01: Example ICS Advisory + https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01 + Mon, 13 Oct 2025 12:00:00 GMT + Vendor: Example Corp

+        Products: ControlSuite 4.2
+        CVE-2024-12345 allows remote code execution.
+        Download PDF
+      ]]></description>
+    </item>
+    <item>
+      <title>ICSMA-25-045-01: Example Medical Advisory</title>
+      <link>https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01</link>
+      <pubDate>Tue, 14 Oct 2025 09:30:00 GMT</pubDate>
+      <description><![CDATA[
+        Vendor: HealthTech
+        Products: InfusionManager 2.1
+        Multiple vulnerabilities including CVE-2025-11111 and CVE-2025-22222.
+      ]]></description>
+    </item>
+  </channel>
+</rss>
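The sample feed above backs ICS CISA fixture work; a quick well-formedness and item-count check keeps regenerated copies honest. A minimal sketch using the standard `xmllint` tool (path taken from the diff header; adjust if the artefact moves):

```bash
# Verify the sample feed parses as XML and report how many <item> advisories it carries.
xmllint --noout docs/artifacts/icscisa/20251014-sample-feed.xml && \
  xmllint --xpath 'count(//item)' docs/artifacts/icscisa/20251014-sample-feed.xml
```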
diff --git a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md index 1b0ceaa4..788f6044 100644 --- a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md +++ b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md @@ -44,6 +44,8 @@ Capability flags let the host reason about what your plug-in supports: **Configuration path normalisation:** Manifest-relative paths (e.g., `tokenSigning.keyDirectory: "../keys"`) are resolved against the YAML file location and environment variables are expanded before validation. Plug-ins should expect to receive an absolute, canonical path when options are injected. +**Password policy guardrails:** The Standard registrar logs a warning when a plug-in weakens the default password policy (minimum length or required character classes). Keep overrides at least as strong as the compiled defaults—operators treat the warning as an actionable security deviation. + ## 4. Project Scaffold - Target **.NET 10 preview**, enable nullable, treat warnings as errors, and mark Authority plug-ins with `true`. - Minimum references: diff --git a/docs/dev/fixtures.md b/docs/dev/fixtures.md index f5e18a0e..15f759be 100644 --- a/docs/dev/fixtures.md +++ b/docs/dev/fixtures.md @@ -35,3 +35,11 @@ fixture sets, where they live, and how to regenerate them safely. - **Verification:** Inspect the generated diffs and re-run `dotnet test src/StellaOps.Feedser.Source.Vndr.Apple.Tests/StellaOps.Feedser.Source.Vndr.Apple.Tests.csproj` without the env var to confirm determinism. > **Tip for other connector owners:** mirror the sentinel + `WSLENV` pattern (`touch .update--fixtures`, append the env var via `WSLENV`) when you add fixture refresh scripts so contributors running under WSL inherit the regeneration flag automatically. + +## KISA advisory fixtures + +- **Location:** `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-{feed,detail}.(xml|json)` +- **Purpose:** Used by `KisaConnectorTests` to verify Hangul-aware fetch → parse → map flows and to assert telemetry counters stay wired. +- **Regeneration:** `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj` +- **Verification:** Re-run the same test suite without the env var; confirm advisory content remains NFC-normalised and HTML is sanitised. Metrics assertions will fail if counters drift. +- **Localisation note:** RSS `category` values (e.g. `취약점정보`) remain in Hangul—do not translate them in fixtures; they feed directly into metrics/log tags. diff --git a/docs/dev/kisa_connector_notes.md b/docs/dev/kisa_connector_notes.md new file mode 100644 index 00000000..ae8dba52 --- /dev/null +++ b/docs/dev/kisa_connector_notes.md @@ -0,0 +1,45 @@ +# KISA Connector Observability & Localisation + +The KISA/KNVD connector now ships with structured telemetry, richer logging, and a localisation brief so Docs/QA can extend operator material without reverse-engineering the source. + +## Telemetry counters + +All metrics are emitted from `KisaDiagnostics` (`Meter` name `StellaOps.Feedser.Source.Kisa`). + +| Metric | Description | Tags | +| --- | --- | --- | +| `kisa.feed.attempts` | RSS fetch attempts per scheduled job. | — | +| `kisa.feed.success` | Successful RSS fetches (increments even when no new items). | — | +| `kisa.feed.failures` | RSS fetch failures. | `reason` (exception type) | +| `kisa.feed.items` | Number of items returned by the RSS window. | — | +| `kisa.detail.attempts` | Advisory detail fetch attempts. 
| `category` (Hangul category from RSS) | +| `kisa.detail.success` | Detail payloads fetched and persisted. | `category` | +| `kisa.detail.unchanged` | HTTP 304 responses reused from cache. | `category` | +| `kisa.detail.failures` | Detail fetch failures or empty payloads. | `category`, `reason` | +| `kisa.parse.attempts` | Documents pulled from Mongo for parsing. | `category` | +| `kisa.parse.success` | Documents parsed into DTOs. | `category` | +| `kisa.parse.failures` | Download or JSON parse failures. | `category`, `reason` | +| `kisa.map.success` | Canonical advisories persisted. | `severity` (e.g. `High`, `unknown`) | +| `kisa.map.failures` | Mapping or DTO hydration failures. | `severity`, `reason` | +| `kisa.cursor.updates` | Published cursor advanced after ingest. | — | + +> `category` tags surface the original Hangul labels (for example `취약점정보`), normalised to NFC. Downstream dashboards should render them as-is; do not transliterate or trim. + +## Logging patterns + +- `Information` level summary when the RSS feed completes (`ItemCount`), on each persisted detail document (IDX, category, documentId), and when a canonical advisory is written (IDX/severity). +- `Debug` level logs capture cache hits (304) and cursor movements (`Published` timestamp). +- `Warning` level emits when a document or DTO is missing so operators can correlate with parse/map counters. +- `Error` level retains exception context for feed/detail/parse/map failures; state repository backoffs are still applied. + +The messages use structured properties (`Idx`, `Category`, `DocumentId`, `Severity`) so Grafana/Loki dashboards can filter without regex. + +## Localisation notes for Docs & QA + +- Hangul fields (`title`, `summary`, `category`, `reference.label`, product vendor/name) are normalised to NFC before storage. Sample category `취약점정보` roughly translates to “vulnerability information”. +- Advisory HTML is sanitised via `HtmlContentSanitizer`, stripping script/style while preserving inline anchors for translation pipelines. +- Metrics carry Hangul `category` tags and logging keeps Hangul strings intact; this ensures air-gapped operators can validate native-language content without relying on MT. +- Fixtures live under `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/`. Regenerate with `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj`. +- The regression suite asserts canonical mapping, state cleanup, and telemetry counters (`KisaConnectorTests.Telemetry_RecordsMetrics`) so QA can track instrumentation drift. + +For operator docs, link to this brief when documenting Hangul handling or counter dashboards so localisation reviewers have a single reference point. diff --git a/docs/dev/normalized_versions_rollout.md b/docs/dev/normalized_versions_rollout.md index f0c3fd6e..832192c8 100644 --- a/docs/dev/normalized_versions_rollout.md +++ b/docs/dev/normalized_versions_rollout.md @@ -20,19 +20,19 @@ This dashboard tracks connector readiness for emitting `AffectedPackage.Normaliz |-----------|------------|---------------------------|-------------|--------------------| | Acsc | BE-Conn-ACSC | ❌ Not started – mapper pending | 2025-10-11 | Design DTOs + mapper with normalized rule array; see `src/StellaOps.Feedser.Source.Acsc/TASKS.md`. | | Cccs | BE-Conn-CCCS | ❌ Not started – mapper pending | 2025-10-11 | Add normalized SemVer array in canonical mapper; coordinate fixtures per `TASKS.md`. 
| -| CertBund | BE-Conn-CERTBUND | ❌ Not started – mapper pending | 2025-10-11 | Capture firmware-style ranges; emit normalized payload; `src/StellaOps.Feedser.Source.CertBund/TASKS.md`. | +| CertBund | BE-Conn-CERTBUND | ✅ Canonical mapper emitting vendor ranges | 2025-10-14 | Normalized vendor range payloads landed alongside telemetry/docs updates; see `src/StellaOps.Feedser.Source.CertBund/TASKS.md`. | | CertCc | BE-Conn-CERTCC | ⚠️ In progress – fetch pipeline DOING | 2025-10-11 | Implement VINCE mapper with SemVer/NEVRA rules; unblock snapshot regeneration; `src/StellaOps.Feedser.Source.CertCc/TASKS.md`. | | Kev | BE-Conn-KEV | ✅ Normalized catalog/due-date rules verified | 2025-10-12 | Fixtures reconfirmed via `dotnet test src/StellaOps.Feedser.Source.Kev.Tests`; `src/StellaOps.Feedser.Source.Kev/TASKS.md`. | | Cve | BE-Conn-CVE | ✅ Normalized SemVer rules verified | 2025-10-12 | Snapshot parity green (`dotnet test src/StellaOps.Feedser.Source.Cve.Tests`); `src/StellaOps.Feedser.Source.Cve/TASKS.md`. | | Ghsa | BE-Conn-GHSA | ⚠️ DOING – normalized rollout task active | 2025-10-11 18:45 UTC | Wire `SemVerRangeRuleBuilder` + refresh fixtures; `src/StellaOps.Feedser.Source.Ghsa/TASKS.md`. | | Osv | BE-Conn-OSV | ✅ SemVer mapper & parity fixtures verified | 2025-10-12 | GHSA parity regression passing (`dotnet test src/StellaOps.Feedser.Source.Osv.Tests`); `src/StellaOps.Feedser.Source.Osv/TASKS.md`. | | Ics.Cisa | BE-Conn-ICS-CISA | ❌ Not started – mapper TODO | 2025-10-11 | Plan SemVer/firmware scheme selection; `src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md`. | -| Kisa | BE-Conn-KISA | ❌ Not started – mapper TODO | 2025-10-11 | Localisation-aware mapper with normalized rules; `src/StellaOps.Feedser.Source.Kisa/TASKS.md`. | -| Ru.Bdu | BE-Conn-BDU | ❌ Not started – mapper TODO | 2025-10-11 | Emit normalized ranges, capture provenance; `src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md`. | +| Kisa | BE-Conn-KISA | ✅ Landed 2025-10-14 (mapper + telemetry) | 2025-10-11 | Hangul-aware mapper emits normalized rules; see `docs/dev/kisa_connector_notes.md` for localisation/metric details. | +| Ru.Bdu | BE-Conn-BDU | ✅ Raw scheme emitted | 2025-10-14 | Mapper now writes `ru-bdu.raw` normalized rules with provenance + telemetry; `src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md`. | | Ru.Nkcki | BE-Conn-Nkcki | ❌ Not started – mapper TODO | 2025-10-11 | Similar to BDU; ensure Cyrillic provenance preserved; `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md`. | | Vndr.Apple | BE-Conn-Apple | ✅ Shipped – emitting normalized arrays | 2025-10-11 | Continue fixture/tooling work; `src/StellaOps.Feedser.Source.Vndr.Apple/TASKS.md`. | -| Vndr.Cisco | BE-Conn-Cisco | ❌ Not started – mapper TODO | 2025-10-11 | Decide on scheme (`semver` vs custom) before emitting rules; `src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md`. | -| Vndr.Msrc | BE-Conn-MSRC | ❌ Not started – mapper TODO | 2025-10-11 | Gather samples, define scheme, emit normalized rules; `src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md`. | +| Vndr.Cisco | BE-Conn-Cisco | ✅ SemVer + vendor extensions emitted | 2025-10-14 | Connector outputs SemVer primitives with `cisco.productId` notes; see `CiscoMapper` and fixtures for coverage. | +| Vndr.Msrc | BE-Conn-MSRC | ✅ Map + normalized build rules landed | 2025-10-15 | `MsrcMapper` emits `msrc.build` normalized rules with CVRF references; see `src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md`. 
| | Nvd | BE-Conn-NVD | ⚠️ Needs follow-up – mapper complete but normalized array MR pending | 2025-10-11 | Align CVE notes + normalized payload flag; `src/StellaOps.Feedser.Source.Nvd/TASKS.md`. | Legend: ✅ complete, ⚠️ in progress/partial, ❌ not started. diff --git a/docs/feedser-connector-research-20251011.md b/docs/feedser-connector-research-20251011.md index 37f1db76..97936ede 100644 --- a/docs/feedser-connector-research-20251011.md +++ b/docs/feedser-connector-research-20251011.md @@ -7,16 +7,20 @@ Snapshot of direct network checks performed on 2025-10-11 (UTC) for the national - Next actions: prototype `SocketsHttpHandler` settings (`RequestVersionOrLower`, allow fallback to relay), capture successful headers from partner vantage (need retention + cache semantics), and keep `FEEDCONN-SHARED-HTTP2-001` open for downgrade work. ## CCCS (Canada) -- RSS endpoint (`https://cyber.gc.ca/api/cccs/rss/v1/get?...`) 301s to Atom feed (`/api/cccs/atom/v1/get?...`) with 50-entry window, HTML-heavy `` fields, and no cache headers. -- Next actions: enumerate additional `feed` query values, sanitise inline HTML for DTO storage, and track retention depth via HTML pagination (`?page=`). +- JSON endpoint (`https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=&content_type=cccs_threat`) returns ~5 100 records per language; `page=` still works for segmented pulls and the earliest `date_created` seen is 2018‑06‑08 (EN) / 2018‑06‑08 (FR). Use an explicit `User-Agent` to avoid 403 responses. +- Follow-up: telemetry, sanitiser coverage, and backfill procedures are documented in `docs/ops/feedser-cccs-operations.md` (2025‑10‑15). Adjust `maxEntriesPerFetch` when performing historical sweeps so cursor state remains responsive. ## CERT-Bund (Germany) -- `https://wid.cert-bund.de/content/public/securityAdvisory/rss` responds 200 without cookies (250-item window, German taxonomy). Detail links load an Angular SPA that fetches JSON behind session cookies. -- Next actions: script SPA cookie/bootstrap, discover JSON detail endpoint, and capture advisory schema for parser planning. +- `https://wid.cert-bund.de/content/public/securityAdvisory/rss` responds 200 without cookies (≈250-item window, German taxonomy). Detail links load an Angular SPA that fetches JSON behind the bootstrap session. +- Confirmed `GET https://wid.cert-bund.de/portal/api/securityadvisory?name=` returns JSON once the portal cookie container is primed; payload includes severity, CVEs, products, and references used by the connector fixtures. +- Historical advisories accessible through the SPA search/export endpoints once the `XSRF-TOKEN` cookie (exposed via `GET /portal/api/security/csrf`) is supplied with the `X-XSRF-TOKEN` header: + - `POST /portal/api/securityadvisory/search` (`{"page":N,"size":100,"sort":["published,desc"]}`) pages data back to 2014. + - `GET /portal/api/securityadvisory/export?format=json&from=YYYY-MM-DD` emits JSON bundles suitable for Offline Kit mirrors. +- Locale note: content is German-only; Feedser preserves `language=de` and Docs will publish a CERT-Bund glossary so operators can bridge terminology without machine translation. ## KISA / KNVD (Korea) - `https://knvd.krcert.or.kr/rss/securityInfo.do` and `/rss/securityNotice.do` return UTF-8 RSS (10-item window) with `detailDos.do?IDX=` links. No cookies required for feed fetch. -- Next actions: trace SPA detail requests to identify JSON endpoints, normalise Hangul content, and finalise localisation plan. 
+- Detail SPA calls resolve to `rssDetailData.do?IDX=` JSON payloads; connector fetches those directly, sanitises HTML, and records Hangul metadata (NFC). See `docs/dev/kisa_connector_notes.md` for telemetry + localisation guidance. ## BDU (Russia / FSTEC) - Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml/json`) return 403/404; TLS chain requires Russian Trusted Sub CA and WAF expects additional headers. diff --git a/docs/ops/feedser-cccs-operations.md b/docs/ops/feedser-cccs-operations.md new file mode 100644 index 00000000..9d423944 --- /dev/null +++ b/docs/ops/feedser-cccs-operations.md @@ -0,0 +1,72 @@ +# Feedser CCCS Connector Operations + +This runbook covers day‑to‑day operation of the Canadian Centre for Cyber Security (`source:cccs:*`) connector, including configuration, telemetry, and historical backfill guidance for English/French advisories. + +## 1. Configuration Checklist + +- Network egress (or mirrored cache) for `https://www.cyber.gc.ca/` and the JSON API endpoints under `/api/cccs/`. +- Set the Feedser options before restarting workers. Example `feedser.yaml` snippet: + +```yaml +feedser: + sources: + cccs: + feeds: + - language: "en" + uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat" + - language: "fr" + uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat" + maxEntriesPerFetch: 80 # increase temporarily for backfill runs + maxKnownEntries: 512 + requestTimeout: "00:00:30" + requestDelay: "00:00:00.250" + failureBackoff: "00:05:00" +``` + +> ℹ️ The `/api/cccs/threats/v1/get` endpoint returns thousands of records per language (≈5 100 rows each as of 2025‑10‑14). The connector honours `maxEntriesPerFetch`, so leave it low for steady‑state and raise it for planned backfills. + +## 2. Telemetry & Logging + +- **Metrics (Meter `StellaOps.Feedser.Source.Cccs`):** + - `cccs.fetch.attempts`, `cccs.fetch.success`, `cccs.fetch.failures` + - `cccs.fetch.documents`, `cccs.fetch.unchanged` + - `cccs.parse.success`, `cccs.parse.failures`, `cccs.parse.quarantine` + - `cccs.map.success`, `cccs.map.failures` +- **Shared HTTP metrics** via `SourceDiagnostics`: + - `feedser.source.http.requests{feedser.source="cccs"}` + - `feedser.source.http.failures{feedser.source="cccs"}` + - `feedser.source.http.duration{feedser.source="cccs"}` +- **Structured logs** + - `CCCS fetch completed feeds=… items=… newDocuments=… pendingDocuments=…` + - `CCCS parse completed parsed=… failures=…` + - `CCCS map completed mapped=… failures=…` + - Warnings fire when GridFS payloads/DTOs go missing or parser sanitisation fails. + +Suggested Grafana alerts: +- `increase(cccs.fetch.failures_total[15m]) > 0` +- `rate(cccs.map.success_total[1h]) == 0` while other connectors are active +- `histogram_quantile(0.95, rate(feedser_source_http_duration_bucket{feedser_source="cccs"}[1h])) > 5s` + +## 3. Historical Backfill Plan + +1. **Snapshot the source** – the API accepts `page=` and `lang=` query parameters. `page=0` returns the full dataset (observed earliest `date_created`: 2018‑06‑08 for EN, 2018‑06‑08 for FR). Mirror those responses into Offline Kit storage when operating air‑gapped. +2. **Stage ingestion**: + - Temporarily raise `maxEntriesPerFetch` (e.g. 500) and restart Feedser workers. 
+ - Run chained jobs until `pendingDocuments` drains: + `stella db jobs run source:cccs:fetch --and-then source:cccs:parse --and-then source:cccs:map` + - Monitor `cccs.fetch.unchanged` growth; once it approaches dataset size the backfill is complete. +3. **Optional pagination sweep** – for incremental mirrors, iterate `page=` (0…N) while `response.Count == 50`, persisting JSON to disk. Store alongside metadata (`language`, `page`, SHA256) so repeated runs detect drift. +4. **Language split** – keep EN/FR payloads separate to preserve canonical language fields. The connector emits `Language` directly from the feed entry, so mixed ingestion simply produces parallel advisories keyed by the same serial number. +5. **Throttle planning** – schedule backfills during maintenance windows; the API tolerates burst downloads but respect the 250 ms request delay or raise it if mirrored traffic is not available. + +## 4. Selector & Sanitiser Notes + +- `CccsHtmlParser` now parses the **unsanitised DOM** (via AngleSharp) and only sanitises when persisting `ContentHtml`. +- Product extraction walks headings (`Affected Products`, `Produits touchés`, `Mesures recommandées`) and consumes nested lists within `div/section/article` containers. +- `HtmlContentSanitizer` allows `
heading and list markup
` so stored HTML keeps headings for UI rendering and downstream summarisation. + +## 5. Fixture Maintenance + +- Regression fixtures live in `src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures`. +- Refresh via `UPDATE_CCCS_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Cccs.Tests/StellaOps.Feedser.Source.Cccs.Tests.csproj`. +- Fixtures capture both EN/FR advisories with nested lists to guard against sanitiser regressions; review diffs for heading/list changes before committing. diff --git a/docs/ops/feedser-certbund-operations.md b/docs/ops/feedser-certbund-operations.md new file mode 100644 index 00000000..3a4f0a2b --- /dev/null +++ b/docs/ops/feedser-certbund-operations.md @@ -0,0 +1,134 @@ +# Feedser CERT-Bund Connector Operations + +_Last updated: 2025-10-15_ + +Germany’s Federal Office for Information Security (BSI) operates the Warn- und Informationsdienst (WID) portal. The Feedser CERT-Bund connector (`source:cert-bund:*`) ingests the public RSS feed, hydrates the portal’s JSON detail endpoint, and maps the result into canonical advisories while preserving the original German content. + +--- + +## 1. Configuration Checklist + +- Allow outbound access (or stage mirrors) for: + - `https://wid.cert-bund.de/content/public/securityAdvisory/rss` + - `https://wid.cert-bund.de/portal/` (session/bootstrap) + - `https://wid.cert-bund.de/portal/api/securityadvisory` (detail/search/export JSON) +- Ensure the HTTP client reuses a cookie container (the connector’s dependency injection wiring already sets this up). + +Example `feedser.yaml` fragment: + +```yaml +feedser: + sources: + cert-bund: + feedUri: "https://wid.cert-bund.de/content/public/securityAdvisory/rss" + portalBootstrapUri: "https://wid.cert-bund.de/portal/" + detailApiUri: "https://wid.cert-bund.de/portal/api/securityadvisory" + maxAdvisoriesPerFetch: 50 + maxKnownAdvisories: 512 + requestTimeout: "00:00:30" + requestDelay: "00:00:00.250" + failureBackoff: "00:05:00" +``` + +> Leave `maxAdvisoriesPerFetch` at 50 during normal operation. Raise it only for controlled backfills, then restore the default to avoid overwhelming the portal. + +--- + +## 2. Telemetry & Logging + +- **Meter**: `StellaOps.Feedser.Source.CertBund` +- **Counters / histograms**: + - `certbund.feed.fetch.attempts|success|failures` + - `certbund.feed.items.count` + - `certbund.feed.enqueued.count` + - `certbund.feed.coverage.days` + - `certbund.detail.fetch.attempts|success|not_modified|failures{reason}` + - `certbund.parse.success|failures{reason}` + - `certbund.parse.products.count`, `certbund.parse.cve.count` + - `certbund.map.success|failures{reason}` + - `certbund.map.affected.count`, `certbund.map.aliases.count` +- Shared HTTP metrics remain available through `feedser.source.http.*`. + +**Structured logs** (all emitted at information level when work occurs): + +- `CERT-Bund fetch cycle: … truncated {Truncated}, coverageDays={CoverageDays}` +- `CERT-Bund parse cycle: parsed {Parsed}, failures {Failures}, …` +- `CERT-Bund map cycle: mapped {Mapped}, failures {Failures}, …` + +Alerting ideas: + +1. `increase(certbund.detail.fetch.failures_total[10m]) > 0` +2. `rate(certbund.map.success_total[30m]) == 0` +3. `histogram_quantile(0.95, rate(feedser_source_http_duration_bucket{feedser_source="cert-bund"}[15m])) > 5s` + +The WebService now registers the meter so metrics surface automatically once OpenTelemetry metrics are enabled. + +--- + +## 3. 
Historical Backfill & Export Strategy + +### 3.1 Retention snapshot + +- RSS window: ~250 advisories (≈90 days at current cadence). +- Older advisories are accessible through the JSON search/export APIs once the anti-CSRF token is supplied. + +### 3.2 JSON search pagination + +```bash +# 1. Bootstrap cookies (client_config + XSRF-TOKEN) +curl -s -c cookies.txt "https://wid.cert-bund.de/portal/" > /dev/null +curl -s -b cookies.txt -c cookies.txt \ + -H "X-Requested-With: XMLHttpRequest" \ + "https://wid.cert-bund.de/portal/api/security/csrf" > /dev/null + +XSRF=$(awk '/XSRF-TOKEN/ {print $7}' cookies.txt) + +# 2. Page search results +curl -s -b cookies.txt \ + -H "Content-Type: application/json" \ + -H "Accept: application/json" \ + -H "X-XSRF-TOKEN: ${XSRF}" \ + -X POST \ + --data '{"page":4,"size":100,"sort":["published,desc"]}' \ + "https://wid.cert-bund.de/portal/api/securityadvisory/search" \ + > certbund-page4.json +``` + +Iterate `page` until the response `content` array is empty. Pages 0–9 currently cover 2014→present. Persist JSON responses (plus SHA256) for Offline Kit parity. + +### 3.3 Export bundles + +```bash +curl -s -b cookies.txt \ + -H "Accept: application/json" \ + -H "X-XSRF-TOKEN: ${XSRF}" \ + "https://wid.cert-bund.de/portal/api/securityadvisory/export?format=json&from=2020-01-01" \ + > certbund-2020-2025.json +``` + +Split long ranges per year and record provenance (`from`, `to`, SHA, capturedAt). Feedser can ingest these JSON payloads directly when operating offline. +Task `FEEDCONN-CERTBUND-02-009` tracks turning this workflow into a shipped Offline Kit artefact with manifests and documentation updates—coordinate with the Docs guild before publishing. + +### 3.4 Connector-driven catch-up + +1. Temporarily raise `maxAdvisoriesPerFetch` (e.g. 150) and reduce `requestDelay`. +2. Run `stella db jobs run source:cert-bund:fetch --and-then source:cert-bund:parse --and-then source:cert-bund:map` until the fetch log reports `enqueued=0`. +3. Restore defaults and capture the cursor snapshot for audit. + +--- + +## 4. Locale & Translation Guidance + +- Advisories remain in German (`language: "de"`). Preserve wording for provenance and legal accuracy. +- UI localisation: enable the translation bundles documented in `docs/15_UI_GUIDE.md` if English UI copy is required. Operators can overlay machine or human translations, but the canonical database stores the source text. +- Docs guild is compiling a CERT-Bund terminology glossary under `docs/locale/certbund-glossary.md` so downstream teams can reference consistent English equivalents without altering the stored advisories. + +--- + +## 5. Verification Checklist + +1. Observe `certbund.feed.fetch.success` and `certbund.detail.fetch.success` increments after runs; `certbund.feed.coverage.days` should hover near the observed RSS window. +2. Ensure summary logs report `truncated=false` in steady state—`true` indicates the fetch cap was hit. +3. During backfills, watch `certbund.feed.enqueued.count` trend to zero. +4. Spot-check stored advisories in Mongo to confirm `language="de"` and reference URLs match the portal detail endpoint. +5. For Offline Kit exports, validate SHA256 hashes before distribution. diff --git a/docs/ops/feedser-cisco-operations.md b/docs/ops/feedser-cisco-operations.md new file mode 100644 index 00000000..3d0ae8e8 --- /dev/null +++ b/docs/ops/feedser-cisco-operations.md @@ -0,0 +1,94 @@ +# Feedser Cisco PSIRT Connector – OAuth Provisioning SOP + +_Last updated: 2025-10-14_ + +## 1. 
Scope
+
+This runbook describes how Ops provisions, rotates, and distributes Cisco PSIRT openVuln OAuth client credentials for the Feedser Cisco connector. It covers online and air-gapped (Offline Kit) environments, quota-aware execution, and escalation paths.
+
+## 2. Prerequisites
+
+- Active Cisco.com (CCO) account with access to the Cisco API Console.
+- Cisco PSIRT openVuln API entitlement (visible under “My Apps & Keys” once granted).
+- Feedser configuration location (typically `/etc/stella/feedser.yaml` in production) or Offline Kit secret bundle staging directory.
+
+## 3. Provisioning workflow
+
+1. **Register the application**
+   - Sign in to the Cisco API Console.
+   - Select **Register a New App** → Application Type: `Service`, Grant Type: `Client Credentials`, API: `Cisco PSIRT openVuln API`.
+   - Record the generated `clientId` and `clientSecret` in the Ops vault.
+2. **Verify token issuance**
+   - Request an access token with:
+     ```bash
+     curl -s https://id.cisco.com/oauth2/default/v1/token \
+       -H "Content-Type: application/x-www-form-urlencoded" \
+       -d "grant_type=client_credentials" \
+       -d "client_id=${CLIENT_ID}" \
+       -d "client_secret=${CLIENT_SECRET}"
+     ```
+   - Confirm HTTP 200 and an `expires_in` value of 3600 seconds (tokens live for one hour).
+   - Preserve the response only long enough to validate syntax; do **not** persist tokens.
+3. **Authorize Feedser runtime**
+   - Update `feedser:sources:cisco:auth` (or the module-specific secret template) with the stored credentials.
+   - For Offline Kit delivery, export encrypted secrets into `offline-kit/secrets/cisco-openvuln.json` using the platform’s sealed secret format.
+4. **Connectivity validation**
+   - From the Feedser control plane, run `stella db jobs run source:vndr-cisco:fetch --dry-run`.
+   - Ensure the Source HTTP diagnostics record `Bearer` authorization headers and no 401/403 responses.
+
+## 4. Rotation SOP
+
+| Step | Owner | Notes |
+| --- | --- | --- |
+| 1. Schedule rotation | Ops (monthly board) | Rotate every 90 days or immediately after suspected credential exposure. |
+| 2. Create replacement app | Ops | Repeat §3.1 with “-next” suffix; verify token issuance. |
+| 3. Stage dual credentials | Ops + Feedser On-Call | Publish new credentials to secret store alongside current pair. |
+| 4. Cut over | Feedser On-Call | Restart connector workers during a low-traffic window (<10 min) to pick up the new secret. |
+| 5. Deactivate legacy app | Ops | Delete prior app in Cisco API Console once telemetry confirms successful fetch/parse cycles for 2 consecutive hours. |
+
+**Automation hooks**
+- Rotation reminders are tracked on the OpsRunbookOps board (`OPS-RUN-KEYS` swim lane); add checklist items for Feedser Cisco when opening a rotation task.
+- Use the secret management pipeline (`ops/secrets/rotate.sh --connector cisco`) to template vault updates; the script renders a redacted diff for audit.
+
+## 5. Offline Kit packaging
+
+1. Generate the credential bundle using the Offline Kit CLI:
+   `offline-kit secrets add cisco-openvuln --client-id … --client-secret …`
+2. Store the encrypted payload under `offline-kit/secrets/cisco-openvuln.enc`.
+3. Distribute via the Offline Kit channel; update `offline-kit/MANIFEST.md` with the credential fingerprint (SHA256 of plaintext concatenated with metadata).
+4. Document validation steps for the receiving site (token request from an air-gapped relay or cached token mirror).
+
+## 6. Quota and throttling guidance
+
+- Cisco enforces combined limits of 5 requests/second, 30 requests/minute, and 5 000 requests/day per application.
+- Feedser fetch jobs must respect `Retry-After` headers on HTTP 429 responses; Ops should monitor for sustained quota saturation and consider paging window adjustments.
+- Telemetry to watch: `feedser.source.http.requests{feedser.source="vndr-cisco"}`, `feedser.source.http.failures{...}`, and connector-specific metrics once implemented.
+
+## 7. Telemetry & Monitoring
+
+- **Metrics (Meter `StellaOps.Feedser.Source.Vndr.Cisco`)**
+  - `cisco.fetch.documents`, `cisco.fetch.failures`, `cisco.fetch.unchanged`
+  - `cisco.parse.success`, `cisco.parse.failures`
+  - `cisco.map.success`, `cisco.map.failures`, `cisco.map.affected.packages`
+- **Shared HTTP metrics** via `SourceDiagnostics`:
+  - `feedser.source.http.requests{feedser.source="vndr-cisco"}`
+  - `feedser.source.http.failures{feedser.source="vndr-cisco"}`
+  - `feedser.source.http.duration{feedser.source="vndr-cisco"}`
+- **Structured logs**
+  - `Cisco fetch completed date=… pages=… added=…` (info)
+  - `Cisco parse completed parsed=… failures=…` (info)
+  - `Cisco map completed mapped=… failures=…` (info)
+  - Warnings surface when DTO serialization fails or GridFS payload is missing.
+- Suggested alerts: non-zero `cisco.fetch.failures` in 15m, or `cisco.map.success` flatlines while fetch continues.
+
+## 8. Incident response
+
+- **Token compromise** – revoke the application in the Cisco API Console, purge cached secrets, rotate immediately per §4.
+- **Persistent 401/403** – confirm credentials in vault, then validate token issuance; if unresolved, open a Cisco DevNet support ticket referencing the application ID.
+- **429 spikes** – inspect job scheduler cadence and adjust connector options (`maxRequestsPerWindow`) before requesting higher quotas from Cisco.
+
+## 9. References
+
+- Cisco PSIRT openVuln API Authentication Guide.
+- Accessing the openVuln API using curl (token lifetime).
+- openVuln API rate limit documentation.
diff --git a/docs/ops/feedser-conflict-resolution.md b/docs/ops/feedser-conflict-resolution.md
index 5a38d106..804e87dd 100644
--- a/docs/ops/feedser-conflict-resolution.md
+++ b/docs/ops/feedser-conflict-resolution.md
@@ -150,3 +150,11 @@ dotnet test src/StellaOps.Feedser.Merge.Tests/StellaOps.Feedser.Merge.Tests.cspr
 ```
 
 - **Expected signals** – The triple produces one freshness-driven summary override (`primary_source=osv`, `suppressed_source=ghsa`) and one range override for the npm SemVer package while leaving `feedser.merge.conflicts` at zero. Use these values as the baseline when tuning dashboards or load-testing alert pipelines.
+
+---
+
+## 10. Change Log
+
+| Date (UTC) | Change | Notes |
+|------------|--------|-------|
+| 2025-10-16 | Ops review signed off after connector expansion (CCCS, CERT-Bund, KISA, ICS CISA, MSRC) landed. Alert thresholds from §3 reaffirmed; dashboards updated to watch attachment signals emitted by ICS CISA connector. | Ops sign-off recorded by Feedser Ops Guild; no additional overrides required.
| diff --git a/docs/ops/feedser-cve-kev-operations.md b/docs/ops/feedser-cve-kev-operations.md index 3d042ec1..c3e84bd1 100644 --- a/docs/ops/feedser-cve-kev-operations.md +++ b/docs/ops/feedser-cve-kev-operations.md @@ -18,6 +18,7 @@ feedser: apiOrg: "ORG123" apiUser: "user@example.org" apiKeyFile: "/var/run/secrets/feedser/cve-api-key" + seedDirectory: "./seed-data/cve" pageSize: 200 maxPagesPerFetch: 5 initialBackfill: "30.00:00:00" @@ -27,6 +28,8 @@ feedser: > ℹ️ Store the API key outside source control. When using `apiKeyFile`, mount the secret file into the container/host; alternatively supply `apiKey` via `FEEDSER_SOURCES__CVE__APIKEY`. +> 🪙 When credentials are not yet available, configure `seedDirectory` to point at mirrored CVE JSON (for example, the repo’s `seed-data/cve/` bundle). The connector will ingest those records and log a warning instead of failing the job; live fetching resumes automatically once `apiOrg` / `apiUser` / `apiKey` are supplied. + ### 1.2 Smoke Test (staging) 1. Deploy the updated configuration and restart the Feedser service so the connector picks up the credentials. @@ -51,6 +54,26 @@ feedser: - **Grafana pack** – Import `docs/ops/feedser-cve-kev-grafana-dashboard.json` and filter by panel legend (`CVE`, `KEV`) to reuse the canned layout. - **Backfill window** – Operators can tighten or widen `initialBackfill` / `maxPagesPerFetch` after validating throughput. Update config and restart Feedser to apply changes. +### 1.4 Staging smoke log (2025-10-15) + +While Ops finalises long-lived CVE Services credentials, we validated the connector end-to-end against the recorded CVE-2024-0001 payloads used in regression tests: + +- Command: `dotnet test src/StellaOps.Feedser.Source.Cve.Tests/StellaOps.Feedser.Source.Cve.Tests.csproj -l "console;verbosity=detailed"` +- Summary log emitted by the connector: + ``` + CVEs fetch window 2024-09-01T00:00:00Z->2024-10-01T00:00:00Z pages=1 listSuccess=1 detailDocuments=1 detailFailures=0 detailUnchanged=0 pendingDocuments=0->1 pendingMappings=0->1 hasMorePages=False nextWindowStart=2024-09-15T12:00:00Z nextWindowEnd=(none) nextPage=1 + ``` +- Telemetry captured by `Meter` `StellaOps.Feedser.Source.Cve`: + | Metric | Value | + |--------|-------| + | `cve.fetch.attempts` | 1 | + | `cve.fetch.success` | 1 | + | `cve.fetch.documents` | 1 | + | `cve.parse.success` | 1 | + | `cve.map.success` | 1 | + +The Grafana pack `docs/ops/feedser-cve-kev-grafana-dashboard.json` has been imported into staging so the panels referenced above render against these counters once the live API keys are in place. + ## 2. CISA KEV Connector (`source:kev:*`) ### 2.1 Prerequisites diff --git a/docs/ops/feedser-icscisa-operations.md b/docs/ops/feedser-icscisa-operations.md new file mode 100644 index 00000000..06b2db0d --- /dev/null +++ b/docs/ops/feedser-icscisa-operations.md @@ -0,0 +1,122 @@ +# Feedser CISA ICS Connector Operations + +This runbook documents how to provision, rotate, and validate credentials for the CISA Industrial Control Systems (ICS) connector (`source:ics-cisa:*`). Follow it before enabling the connector in staging or offline installations. + +## 1. Credential Provisioning + +1. **Create a service mailbox** reachable by the Ops crew (shared mailbox recommended). +2. Browse to `https://public.govdelivery.com/accounts/USDHSCISA/subscriber/new` and subscribe the mailbox to the following GovDelivery topics: + - `USDHSCISA_16` — ICS-CERT advisories (legacy numbering: `ICSA-YY-###`). 
+   - `USDHSCISA_19` — ICS medical advisories (`ICSMA-YY-###`).
+   - `USDHSCISA_17` — ICS alerts (`IR-ALERT-YY-###`) for completeness.
+3. Complete the verification email. After confirmation, note the **personalised subscription code** included in the “Manage Preferences” link. It has the shape `code=AB12CD34EF`.
+4. Store the code in the shared secret vault (or Offline Kit secrets bundle) as `feedser/sources/icscisa/govdelivery/code`.
+
+> ℹ️ GovDelivery does not expose a one-time API key; the personalised code is what authenticates the RSS pull. Never commit it to git.
+
+## 2. Feed Validation
+
+Use the following command to confirm the feed is reachable before wiring it into Feedser (substitute `<code>` with the personalised value):
+
+```bash
+curl -H "User-Agent: StellaOpsFeedser/ics-cisa" \
+  "https://content.govdelivery.com/accounts/USDHSCISA/topics/ICS-CERT/feed.rss?format=xml&code=<code>"
+```
+
+If the endpoint returns HTTP 200 and an RSS payload, record the sample response under `docs/artifacts/icscisa/` (see Task `FEEDCONN-ICSCISA-02-007`). HTTP 403 or 406 usually means the subscription was not confirmed or the code was mistyped.
+
+## 3. Configuration Snippet
+
+Add the connector configuration to `feedser.yaml` (or equivalent environment variables):
+
+```yaml
+feedser:
+  sources:
+    icscisa:
+      govDelivery:
+        code: "${FEEDSER_ICS_CISA_GOVDELIVERY_CODE}"
+        topics:
+          - "USDHSCISA_16"
+          - "USDHSCISA_19"
+          - "USDHSCISA_17"
+      rssBaseUri: "https://content.govdelivery.com/accounts/USDHSCISA"
+      requestDelay: "00:00:01"
+      failureBackoff: "00:05:00"
+```
+
+Environment variable example:
+
+```bash
+export FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE="AB12CD34EF"
+```
+
+Feedser automatically registers the host with the Source.Common HTTP allow-list when the connector assembly is loaded.
+
+Optional tuning keys (set only when needed):
+
+- `proxyUri` — HTTP/HTTPS proxy URL used when Akamai blocks direct pulls.
+- `requestVersion` / `requestVersionPolicy` — override HTTP negotiation when the proxy requires HTTP/1.1.
+- `enableDetailScrape` — toggle HTML detail fallback (defaults to true).
+- `captureAttachments` — collect PDF attachments from detail pages (defaults to true).
+- `detailBaseUri` — alternate host for detail enrichment if CISA changes their layout.
+
+## 4. Seeding Without GovDelivery
+
+If credentials are still pending, populate the connector with the community CSV dataset before enabling the live fetch:
+
+1. Run `./scripts/fetch-ics-cisa-seed.sh` (or `.ps1`) to download the latest `CISA_ICS_ADV_*.csv` files into `seed-data/ics-cisa/`.
+2. Copy the CSVs (and the generated `.sha256` files) into your Offline Kit staging area so they ship alongside the other feeds.
+3. Import the kit as usual. The connector can parse the seed data for historical context, but **live GovDelivery credentials are still required** for fresh advisories.
+4. Once credentials arrive, update `feedser:sources:icscisa:govDelivery:code` and re-trigger `source:ics-cisa:fetch` so the connector switches to the authorised feed.
+
+> The CSVs are licensed under ODbL 1.0 by the ICS Advisory Project. Preserve the attribution when redistributing them.
+
+## 5. Integration Validation
+
+1. Ensure secrets are in place and restart the Feedser workers.
+2. Run a dry-run fetch/parse/map chain against an Akamai-protected topic:
+   ```bash
+   FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE=... \
+   FEEDSER_SOURCES_ICSCISA_ENABLEDETAILSCRAPE=1 \
+   stella db jobs run source:ics-cisa:fetch --and-then source:ics-cisa:parse --and-then source:ics-cisa:map
+   ```
+3. Confirm logs contain `ics-cisa detail fetch` entries and that new documents/DTOs include attachments (see `docs/artifacts/icscisa`). Canonical advisories should expose PDF links as `references.kind == "attachment"` and affected packages should surface `primitives.semVer.exactValue` for single-version hits.
+4. If Akamai blocks direct fetches, set `feedser:sources:icscisa:proxyUri` to your allow-listed egress proxy and rerun the dry-run.
+
+## 6. Rotation & Incident Response
+
+- Review GovDelivery access quarterly. Rotate the personalised code whenever Ops changes the service mailbox password or membership.
+- Revoking the subscription in GovDelivery invalidates the code immediately; update the vault and configuration in the same change.
+- If the code leaks, remove the subscription (`https://public.govdelivery.com/accounts/USDHSCISA/subscriber/manage_preferences?code=<code>`), resubscribe, and distribute the new value via the vault.
+
+## 7. Offline Kit Handling
+
+Include the personalised code in `offline-kit/secrets/feedser/icscisa.env`:
+
+```
+FEEDSER_SOURCES_ICSCISA_GOVDELIVERY_CODE=AB12CD34EF
+```
+
+The Offline Kit deployment script copies this file into the container secret directory mounted at `/run/secrets/feedser`. Ensure permissions are `600` and ownership matches the Feedser runtime user.
+
+## 8. Telemetry & Monitoring
+
+The connector emits metrics under the meter `StellaOps.Feedser.Source.Ics.Cisa`. They allow operators to track Akamai fallbacks, detail enrichment health, and advisory fan-out.
+
+- `icscisa.fetch.*` – counters for `attempts`, `success`, `failures`, `not_modified`, and `fallbacks`, plus histogram `icscisa.fetch.documents` showing documents added per topic pull (tags: `feedser.source`, `icscisa.topic`).
+- `icscisa.parse.*` – counters for `success`/`failures` and histograms `icscisa.parse.advisories`, `icscisa.parse.attachments`, `icscisa.parse.detail_fetches` to monitor enrichment workload per feed document.
+- `icscisa.detail.*` – counters `success` / `failures` per advisory (tagged with `icscisa.advisory`) to alert when Akamai blocks detail pages.
+- `icscisa.map.*` – counters for `success`/`failures` and histograms `icscisa.map.references`, `icscisa.map.packages`, `icscisa.map.aliases` capturing canonical fan-out.
+
+Suggested alerts:
+
+- `increase(icscisa.fetch.failures_total[15m]) > 0` or `increase(icscisa.fetch.fallbacks_total[15m]) > 5` — sustained Akamai or proxy issues.
+- `increase(icscisa.detail.failures_total[30m]) > 0` — detail enrichment breaking (potential HTML layout change).
+- `histogram_quantile(0.95, rate(icscisa.map.references_bucket[1h]))` trending sharply higher — sudden advisory reference explosion worth investigating.
+- Keep an eye on shared HTTP metrics (`feedser.source.http.*{feedser.source="ics-cisa"}`) for request latency and retry patterns.
+
+## 9. Related Tasks
+
+- `FEEDCONN-ICSCISA-02-009` (GovDelivery credential onboarding) — completed once this runbook is followed and secrets are placed in the vault.
+- `FEEDCONN-ICSCISA-02-007` (document inventory) — archive the first successful RSS response and any attachment URL schema under `docs/artifacts/icscisa/`.
diff --git a/docs/ops/feedser-kisa-operations.md b/docs/ops/feedser-kisa-operations.md new file mode 100644 index 00000000..d2d25caf --- /dev/null +++ b/docs/ops/feedser-kisa-operations.md @@ -0,0 +1,74 @@ +# Feedser KISA Connector Operations + +Operational guidance for the Korea Internet & Security Agency (KISA / KNVD) connector (`source:kisa:*`). Pair this with the engineering brief in `docs/dev/kisa_connector_notes.md`. + +## 1. Prerequisites + +- Outbound HTTPS (or mirrored cache) for `https://knvd.krcert.or.kr/`. +- Connector options defined under `feedser:sources:kisa`: + +```yaml +feedser: + sources: + kisa: + feedUri: "https://knvd.krcert.or.kr/rss/securityInfo.do" + detailApiUri: "https://knvd.krcert.or.kr/rssDetailData.do" + detailPageUri: "https://knvd.krcert.or.kr/detailDos.do" + maxAdvisoriesPerFetch: 10 + requestDelay: "00:00:01" + failureBackoff: "00:05:00" +``` + +> Ensure the URIs stay absolute—Feedser adds the `feedUri`/`detailApiUri` hosts to the HttpClient allow-list automatically. + +## 2. Staging Smoke Test + +1. Restart the Feedser workers so the KISA options bind. +2. Run a full connector cycle: + - CLI: `stella db jobs run source:kisa:fetch --and-then source:kisa:parse --and-then source:kisa:map` + - REST: `POST /jobs/run { "kind": "source:kisa:fetch", "chain": ["source:kisa:parse", "source:kisa:map"] }` +3. Confirm telemetry (Meter `StellaOps.Feedser.Source.Kisa`): + - `kisa.feed.success`, `kisa.feed.items` + - `kisa.detail.success` / `.failures` + - `kisa.parse.success` / `.failures` + - `kisa.map.success` / `.failures` + - `kisa.cursor.updates` +4. Inspect logs for structured entries: + - `KISA feed returned {ItemCount}` + - `KISA fetched detail for {Idx} … category={Category}` + - `KISA mapped advisory {AdvisoryId} (severity={Severity})` + - Absence of warnings such as `document missing GridFS payload`. +5. Validate MongoDB state: + - `raw_documents.metadata` has `kisa.idx`, `kisa.category`, `kisa.title`. + - DTO store contains `schemaVersion="kisa.detail.v1"`. + - Advisories include aliases (`IDX`, CVE) and `language="ko"`. + - `source_states` entry for `kisa` shows recent `cursor.lastFetchAt`. + +## 3. Production Monitoring + +- **Dashboards** – Add the following Prometheus/OTEL expressions: + - `rate(kisa_feed_items_total[15m])` versus `rate(feedser_source_http_requests_total{feedser_source="kisa"}[15m])` + - `increase(kisa_detail_failures_total{reason!="empty-document"}[1h])` alert at `>0` + - `increase(kisa_parse_failures_total[1h])` for storage/JSON issues + - `increase(kisa_map_failures_total[1h])` to flag schema drift + - `increase(kisa_cursor_updates_total[6h]) == 0` during active windows → warn +- **Alerts** – Page when `rate(kisa_feed_success_total[2h]) == 0` while other connectors are active; back off for maintenance windows announced on `https://knvd.krcert.or.kr/`. +- **Logs** – Watch for repeated warnings (`document missing`, `DTO missing`) or errors with reason tags `HttpRequestException`, `download`, `parse`, `map`. + +## 4. Localisation Handling + +- Hangul categories (for example `취약점정보`) flow into telemetry tags (`category=…`) and logs. Dashboards must render UTF‑8 and avoid transliteration. +- HTML content is sanitised before storage; translation teams can consume the `ContentHtml` field safely. +- Advisory severity remains as provided by KISA (`High`, `Medium`, etc.). Map-level failures include the severity tag for filtering. + +## 5. 
Fixture & Regression Maintenance + +- Regression fixtures: `src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-feed.xml` and `kisa-detail.json`. +- Refresh via `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj`. +- The telemetry regression (`KisaConnectorTests.Telemetry_RecordsMetrics`) will fail if counters/log wiring drifts—treat failures as gating. + +## 6. Known Issues + +- RSS feeds only expose the latest 10 advisories; long outages require replay via archived feeds or manual IDX seeds. +- Detail endpoint occasionally throttles; the connector honours `requestDelay` and reports failures with reason `HttpRequestException`. Consider increasing delay for weekend backfills. +- If `kisa.category` tags suddenly appear as `unknown`, verify KISA has not renamed RSS elements; update the parser fixtures before production rollout. diff --git a/docs/ops/feedser-msrc-operations.md b/docs/ops/feedser-msrc-operations.md new file mode 100644 index 00000000..828b5a9c --- /dev/null +++ b/docs/ops/feedser-msrc-operations.md @@ -0,0 +1,86 @@ +# Feedser MSRC Connector – Azure AD Onboarding Brief + +_Drafted: 2025-10-15_ + +## 1. App registration requirements + +- **Tenant**: shared StellaOps production Azure AD. +- **Application type**: confidential client (web/API) issuing client credentials. +- **API permissions**: `api://api.msrc.microsoft.com/.default` (Application). Admin consent required once. +- **Token audience**: `https://api.msrc.microsoft.com/`. +- **Grant type**: client credentials. Feedser will request tokens via `POST https://login.microsoftonline.com/{tenantId}/oauth2/v2.0/token`. + +## 2. Secret/credential policy + +- Maintain two client secrets (primary + standby) rotating every 90 days. +- Store secrets in the Feedser secrets vault; Offline Kit deployments must mirror the secret payloads in their encrypted store. +- Record rotation cadence in Ops runbook and update Feedser configuration (`FEEDSER__SOURCES__VNDR__MSRC__CLIENTSECRET`) ahead of expiry. + +## 3. Feedser configuration sample + +```yaml +feedser: + sources: + vndr.msrc: + tenantId: "" + clientId: "" + clientSecret: "" + apiVersion: "2024-08-01" + locale: "en-US" + requestDelay: "00:00:00.250" + failureBackoff: "00:05:00" + cursorOverlapMinutes: 10 + downloadCvrf: false # set true to persist CVRF ZIP alongside JSON detail +``` + +## 4. CVRF artefacts + +- The MSRC REST payload exposes `cvrfUrl` per advisory. Current connector persists the link as advisory metadata and reference; it does **not** download the ZIP by default. +- Ops should mirror CVRF ZIPs when preparing Offline Kits so air-gapped deployments can reconcile advisories without direct internet access. +- Once Offline Kit storage guidelines are finalised, extend the connector configuration with `downloadCvrf: true` to enable automatic attachment retrieval. + +### 4.1 State seeding helper + +Use `tools/SourceStateSeeder` to queue historical advisories (detail JSON + optional CVRF artefacts) for replay without manual Mongo edits. 
Example seed file: + +```json +{ + "source": "vndr.msrc", + "cursor": { + "lastModifiedCursor": "2024-01-01T00:00:00Z" + }, + "documents": [ + { + "uri": "https://api.msrc.microsoft.com/sug/v2.0/vulnerability/ADV2024-0001", + "contentFile": "./seeds/adv2024-0001.json", + "contentType": "application/json", + "metadata": { "msrc.vulnerabilityId": "ADV2024-0001" }, + "addToPendingDocuments": true + }, + { + "uri": "https://download.microsoft.com/msrc/2024/ADV2024-0001.cvrf.zip", + "contentFile": "./seeds/adv2024-0001.cvrf.zip", + "contentType": "application/zip", + "status": "mapped", + "addToPendingDocuments": false + } + ] +} +``` + +Run the helper: + +```bash +dotnet run --project tools/SourceStateSeeder -- \ + --connection-string "mongodb://localhost:27017" \ + --database feedser \ + --input seeds/msrc-backfill.json +``` + +Any documents marked `addToPendingDocuments` will appear in the connector cursor; `DownloadCvrf` can remain disabled if the ZIP artefact is pre-seeded. + +## 5. Outstanding items + +- Ops to confirm tenant/app names and provide client credentials through the secure channel. +- Connector team monitors token cache health (already implemented); validate instrumentation once Ops supplies credentials. +- Offline Kit packaging: add encrypted blob containing client credentials with rotation instructions. diff --git a/docs/ops/feedser-nkcki-operations.md b/docs/ops/feedser-nkcki-operations.md new file mode 100644 index 00000000..4424c9ee --- /dev/null +++ b/docs/ops/feedser-nkcki-operations.md @@ -0,0 +1,48 @@ +# NKCKI Connector Operations Guide + +## Overview + +The NKCKI connector ingests JSON bulletin archives from cert.gov.ru, expanding each `*.json.zip` attachment into per-vulnerability DTOs before canonical mapping. The fetch pipeline now supports cache-backed recovery, deterministic pagination, and telemetry suitable for production monitoring. + +## Configuration + +Key options exposed through `feedser:sources:ru-nkcki:http`: + +- `maxBulletinsPerFetch` – limits new bulletin downloads in a single run (default `5`). +- `maxListingPagesPerFetch` – maximum listing pages visited during pagination (default `3`). +- `listingCacheDuration` – minimum interval between listing fetches before falling back to cached artefacts (default `00:10:00`). +- `cacheDirectory` – optional path for persisted bulletin archives used during offline or failure scenarios. +- `requestDelay` – delay inserted between bulletin downloads to respect upstream politeness. + +When operating in offline-first mode, set `cacheDirectory` to a writable path (e.g. `/var/lib/feedser/cache/ru-nkcki`) and pre-populate bulletin archives via the offline kit. + +## Telemetry + +`RuNkckiDiagnostics` emits the following metrics under meter `StellaOps.Feedser.Source.Ru.Nkcki`: + +- `nkcki.listing.fetch.attempts` / `nkcki.listing.fetch.success` / `nkcki.listing.fetch.failures` +- `nkcki.listing.pages.visited` (histogram, `pages`) +- `nkcki.listing.attachments.discovered` / `nkcki.listing.attachments.new` +- `nkcki.bulletin.fetch.success` / `nkcki.bulletin.fetch.cached` / `nkcki.bulletin.fetch.failures` +- `nkcki.entries.processed` (histogram, `entries`) + +Integrate these counters into standard Feedser observability dashboards to track crawl coverage and cache hit rates. + +## Archive Backfill Strategy + +Bitrix pagination surfaces archives via `?PAGEN_1=n`. The connector now walks up to `maxListingPagesPerFetch` pages, deduplicating bulletin IDs and maintaining a rolling `knownBulletins` window. Backfill strategy: + +1. 
Enumerate pages from newest to oldest, respecting `maxListingPagesPerFetch` and `listingCacheDuration` to avoid refetch storms. +2. Persist every `*.json.zip` attachment to the configured cache directory. This enables replay when listing access is temporarily blocked. +3. During archive replay, `ProcessCachedBulletinsAsync` enqueues missing documents while respecting `maxVulnerabilitiesPerFetch`. +4. For historical HTML-only advisories, collect page URLs and metadata while offline (future work: HTML and PDF extraction pipeline documented in `docs/feedser-connector-research-20251011.md`). + +For large migrations, seed caches with archived zip bundles, then run fetch/parse/map cycles in chronological order to maintain deterministic outputs. + +## Failure Handling + +- Listing failures mark the source state with exponential backoff while attempting cache replay. +- Bulletin fetches fall back to cached copies before surfacing an error. +- Mongo integration tests rely on bundled OpenSSL 1.1 libraries (`tools/openssl/linux-x64`) to keep `Mongo2Go` operational on modern distros. + +Refer to `ru-nkcki` entries in `src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md` for outstanding items. diff --git a/docs/security/audit-events.md b/docs/security/audit-events.md index 5fcb16a9..468c7ef5 100644 --- a/docs/security/audit-events.md +++ b/docs/security/audit-events.md @@ -15,7 +15,7 @@ Audit events share the `StellaOps.Cryptography.Audit.AuthEventRecord` contract. - `Client` — `AuthEventClient` with client identifier, display name, and originating provider/plugin. - `Scopes` — granted or requested OAuth scopes (sorted before emission). - `Network` — `AuthEventNetwork` with remote address, forwarded headers, and user agent string (all treated as PII). -- `Properties` — additional `AuthEventProperty` entries for context-specific details (lockout durations, policy decisions, retries, etc.). +- `Properties` — additional `AuthEventProperty` entries for context-specific details (lockout durations, policy decisions, retries, `request.tampered`/`request.unexpected_parameter`, `bootstrap.invite_token`, etc.). ## Data Classifications @@ -33,7 +33,13 @@ Event names follow dotted notation: - `authority.password.grant` — password grant handled by OpenIddict. - `authority.client_credentials.grant` — client credential grant handling. +- `authority.token.tamper` — suspicious `/token` request detected (unexpected parameters or manipulated payload). - `authority.bootstrap.user` and `authority.bootstrap.client` — bootstrap API operations. +- `authority.bootstrap.invite.created` — operator created a bootstrap invite. +- `authority.bootstrap.invite.consumed` — invite consumed during user/client provisioning. +- `authority.bootstrap.invite.expired` — invite expired without being used. +- `authority.bootstrap.invite.rejected` — invite was rejected (invalid, mismatched provider/target, or already consumed). +- `authority.token.replay.suspected` — replay heuristics detected a token being used from a new device fingerprint. - Future additions should preserve the `authority..` pattern to keep filtering deterministic. 
## Persistence diff --git a/docs/security/authority-threat-model.md b/docs/security/authority-threat-model.md index 9cbba654..f9ad5c99 100644 --- a/docs/security/authority-threat-model.md +++ b/docs/security/authority-threat-model.md @@ -82,9 +82,9 @@ flowchart LR | Threat | STRIDE Vector | Surface | Risk (L×I) | Existing Controls | Gaps / Actions | Owner | |--------|---------------|---------|------------|-------------------|----------------|-------| | Spoofed revocation bundle | Spoofing | TB5 — Authority ↔️ Agents | Med×High | Detached JWS signature (planned), offline kit checksums | Finalise signing key registry & verification script (SEC4.B/SEC4.HOST); add bundle freshness requirement | Security Guild (follow-up: **SEC5.B**) | -| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Add audit coverage for tampered inputs, align correlation IDs with SOC (SEC2.A/SEC2.B) | Security Guild + Authority Core (follow-up: **SEC5.C**) | -| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Enforce invite expiration + audit trail for unused invites | Security Guild (follow-up: **SEC5.D**) | -| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Planned revocation bundles, optional mTLS | Require agent binding (device fingerprint) and enforce revocation grace window alerts | Security Guild + Zastava (follow-up: **SEC5.E**) | +| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Tampered requests emit `authority.token.tamper` audit events (`request.tampered`, unexpected parameter names) correlating with `/token` outcomes (SEC5.C) | Security Guild + Authority Core (follow-up: **SEC5.C**) | +| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Invites expire automatically and emit audit events on consumption/expiration (SEC5.D) | Security Guild | +| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Signed revocation bundles, device fingerprint heuristics, optional mTLS | Monitor revocation acknowledgement latency via Zastava and tune replay alerting thresholds | Security Guild + Zastava (follow-up: **SEC5.E**) | | Privilege escalation via plug-in override | Elevation of Privilege | TB3 — Plug-in sandbox | Med×High | Signed plug-ins, restart-only loading, configuration validation | Add static analysis on manifest overrides + runtime warning when policy weaker than host | Security Guild + DevOps (follow-up: **SEC5.F**) | | Offline bundle tampering | Tampering | Distribution | Low×High | SHA256 manifest, signed bundles (planned) | Add supply-chain attestation for Offline Kit, publish verification CLI in docs | Security Guild + Ops (follow-up: **SEC5.G**) | | Failure to log denied tokens | Repudiation | TB2 — Authority ↔️ Mongo | Med×Med | Serilog structured events (partial), Mongo persistence path (planned) | Finalise audit schema (SEC2.A) and ensure `/token` denies include subject/client/IP fields | Security Guild + Authority Core (follow-up: **SEC5.H**) | @@ -98,7 +98,7 @@ Risk scoring uses qualitative scale (Low/Med/High) for likelihood × impact; mit | SEC5.B | Spoofed revocation bundle | Complete libsodium/Core signing integration and ship revocation verification script. 
| Security Guild + Authority Core | | SEC5.C | Parameter tampering on `/token` | Finalise audit contract (`SEC2.A`) and add request tamper logging. | Security Guild + Authority Core | | SEC5.D | Bootstrap invite replay | Implement expiry enforcement + audit coverage for unused bootstrap invites. | Security Guild | -| SEC5.E | Token replay by stolen agent | Document device binding requirements and create detector for stale revocation acknowledgements. | Security Guild + Zastava | +| SEC5.E | Token replay by stolen agent | Coordinate Zastava alerting with the new device fingerprint heuristics and surface stale revocation acknowledgements. | Security Guild + Zastava | | SEC5.F | Plug-in override escalation | Static analysis of plug-in manifests; warn on weaker password policy overrides. | Security Guild + DevOps | | SEC5.G | Offline bundle tampering | Extend Offline Kit build to include attested manifest + verification CLI sample. | Security Guild + Ops | | SEC5.H | Failure to log denied tokens | Ensure audit persistence for all `/token` denials with correlation IDs. | Security Guild + Authority Core | diff --git a/docs/security/rate-limits.md b/docs/security/rate-limits.md new file mode 100644 index 00000000..a6f561dc --- /dev/null +++ b/docs/security/rate-limits.md @@ -0,0 +1,76 @@ +# StellaOps Authority Rate Limit Guidance + +StellaOps Authority applies fixed-window rate limiting to critical endpoints so that brute-force and burst traffic are throttled before they can exhaust downstream resources. This guide complements the lockout policy documentation and captures the recommended defaults, override scenarios, and monitoring practices for `/token`, `/authorize`, and `/internal/*` routes. + +## Configuration Overview + +Rate limits live under `security.rateLimiting` in `authority.yaml` (and map to the same hierarchy for environment variables). Each endpoint exposes: + +- `enabled` — toggles the limiter. +- `permitLimit` — maximum requests per fixed window. +- `window` — window duration expressed as an ISO-8601 timespan (e.g., `00:01:00`). +- `queueLimit` — number of requests allowed to queue when the window is exhausted. + +```yaml +security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: 00:01:00 + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: 00:01:00 + queueLimit: 10 + internal: + enabled: false + permitLimit: 5 + window: 00:01:00 + queueLimit: 0 +``` + +When limits trigger, middleware decorates responses with `Retry-After` headers and log tags (`authority.endpoint`, `authority.client_id`, `authority.remote_ip`) so operators can correlate events with clients and source IPs. + +Environment overrides follow the same hierarchy. For example: + +``` +STELLAOPS_AUTHORITY__SECURITY__RATELIMITING__TOKEN__PERMITLIMIT=60 +STELLAOPS_AUTHORITY__SECURITY__RATELIMITING__TOKEN__WINDOW=00:01:00 +``` + +## Recommended Profiles + +| Scenario | permitLimit | window | queueLimit | Notes | +|----------|-------------|--------|------------|-------| +| Default production | 30 | 60s | 0 | Balances anonymous quota (33 scans/day) with headroom for tenant bursts. | +| High-trust clustered IPs | 60 | 60s | 5 | Requires WAF allowlist + alert `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"} <= 1%` sustained. | +| Air-gapped lab | 10 | 120s | 0 | Lower concurrency reduces noise when running from shared bastion hosts. | +| Incident lockdown | 5 | 300s | 0 | Pair with credential lockout limit of 3 attempts and SOC paging for each denial. 
| + +### Lockout Interplay + +- Rate limiting throttles by IP/client; lockout policies apply per subject. Keep both enabled. +- During lockdown scenarios, reduce `security.lockout.maxFailures` alongside the rate limits above so that subjects face quicker escalation. +- Map support playbooks to the observed `Retry-After` value: anything above 120 seconds should trigger manual investigation before re-enabling clients. + +## Monitoring and Alerts + +1. **Metrics** + - `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}` for `/token`. + - `aspnetcore_rate_limiting_rejections_total{limiter="authority-authorize"}` for `/authorize`. + - Custom counters derived from the structured log tags (`authority.remote_ip`, `authority.client_id`). +2. **Dashboards** + - Requests vs. rejections per endpoint. + - Top offending clients/IP ranges in the current window. + - Heatmap of retry-after durations to spot persistent throttling. +3. **Alerts** + - Notify SOC when 429 rates exceed 25 % for five consecutive minutes on any limiter. + - Trigger client-specific alerts when a single client_id produces >100 throttle events/hour. + +## Operational Checklist + +- Validate updated limits in staging before production rollout; smoke-test with representative workload. +- When raising limits, confirm audit events continue to capture `authority.client_id`, `authority.remote_ip`, and correlation IDs for throttle responses. +- Document any overrides in the change log, including justification and expiry review date. diff --git a/docs/security/revocation-bundle.md b/docs/security/revocation-bundle.md index 417a0d8d..657c10e0 100644 --- a/docs/security/revocation-bundle.md +++ b/docs/security/revocation-bundle.md @@ -43,6 +43,7 @@ Consumers MUST treat the combination of `schemaVersion` and `sequence` as a mono { "alg": "ES256", "kid": "{signingKeyId}", + "provider": "{providerName}", "typ": "application/vnd.stellaops.revocation-bundle+jws", "b64": false, "crit": ["b64"] @@ -54,8 +55,28 @@ Verification steps: 1. Validate `revocation-bundle.json` against the schema. 2. Re-compute SHA-256 and compare with `.sha256` (if present). -3. Resolve the signing key from JWKS (`/.well-known/jwks.json`) or the offline key bundle. -4. Verify the detached JWS using the stored signing key (example tooling coming with `stella auth revoke verify`). +3. Resolve the signing key from JWKS (`/.well-known/jwks.json`) or the offline key bundle, preferring the provider declared in the JWS header (`provider` falls back to `default`). +4. Verify the detached JWS using the resolved provider. The CLI mirrors Authority resolution, so builds compiled with `StellaOpsCryptoSodium=true` automatically use the libsodium provider when advertised; otherwise verification downgrades to the managed fallback. + +### CLI verification workflow + +Use the bundled CLI command before distributing a bundle: + +```bash +stellaops auth revoke verify \ + --bundle artifacts/revocation-bundle.json \ + --signature artifacts/revocation-bundle.json.jws \ + --key etc/authority/signing/authority-public.pem \ + --verbose +``` + +The verifier performs three checks: + +1. Prints the computed digest in `sha256:` format. Compare it with the exported `.sha256` artefact. +2. Confirms the detached JWS header advertises `b64: false`, captures the provider hint, and that the algorithm matches the Authority configuration (ES256 unless overridden). +3. 
Registers the supplied PEM key with the crypto provider registry and validates the signature (falling back to the managed provider when the hinted provider is unavailable). + +A zero exit code means the bundle is ready for mirroring/import. Non-zero codes signal missing arguments, malformed JWS payloads, or signature mismatches; regenerate or re-sign the bundle before distribution. ## Example @@ -64,7 +85,7 @@ The repository contains an [example bundle](revocation-bundle-example.json) demo ## Operations Quick Reference - `stella auth revoke export` emits a canonical JSON bundle, `.sha256` digest, and detached JWS signature in one command. Use `--output` to write into your mirror staging directory. -- `stella auth revoke verify` validates a bundle using cached JWKS or an offline PEM key and reports digest mismatches before distribution. +- `stella auth revoke verify` validates a bundle using cached JWKS or an offline PEM key, honours the `provider` metadata embedded in the signature, and reports digest mismatches before distribution. - `POST /internal/revocations/export` provides the same payload for orchestrators that already talk to the bootstrap API. - `POST /internal/signing/rotate` rotates JWKS material without downtime; always export a fresh bundle afterward so downstream mirrors receive signatures from the new `kid`. - Offline Kit automation should mirror `revocation-bundle.json*` alongside Feedser exports so agents ingest revocations during the same sync pass. diff --git a/etc/feedser.yaml.sample b/etc/feedser.yaml.sample index 25038d57..a36cdd1f 100644 --- a/etc/feedser.yaml.sample +++ b/etc/feedser.yaml.sample @@ -83,3 +83,15 @@ sources: failureBackoff: "00:05:00" rateLimitWarningThreshold: 500 secondaryRateLimitBackoff: "00:02:00" + cve: + baseEndpoint: "https://cveawg.mitre.org/api/" + apiOrg: "" + apiUser: "" + apiKey: "" + # Optional mirror used when credentials are unavailable. + seedDirectory: "./seed-data/cve" + pageSize: 200 + maxPagesPerFetch: 5 + initialBackfill: "30.00:00:00" + requestDelay: "00:00:00.250" + failureBackoff: "00:10:00" diff --git a/scripts/fetch-ics-cisa-seed.ps1 b/scripts/fetch-ics-cisa-seed.ps1 new file mode 100644 index 00000000..1f9e7acb --- /dev/null +++ b/scripts/fetch-ics-cisa-seed.ps1 @@ -0,0 +1,38 @@ +param( + [string]$Destination = "$(Join-Path (Split-Path -Parent $PSCommandPath) '..' 
| Resolve-Path)/seed-data/ics-cisa" +) + +$ErrorActionPreference = 'Stop' +New-Item -Path $Destination -ItemType Directory -Force | Out-Null + +Function Write-Info($Message) { Write-Host "[ics-seed] $Message" } +Function Write-ErrorLine($Message) { Write-Host "[ics-seed][error] $Message" -ForegroundColor Red } + +Function Download-File($Url, $Path) { + Write-Info "Downloading $(Split-Path $Path -Leaf)" + Invoke-WebRequest -Uri $Url -OutFile $Path -UseBasicParsing + $hash = Get-FileHash -Path $Path -Algorithm SHA256 + $hash.Hash | Out-File -FilePath "$Path.sha256" -Encoding ascii +} + +$base = 'https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV' +$master = 'CISA_ICS_ADV_Master.csv' +$snapshot = 'CISA_ICS_ADV_2025_10_09.csv' + +Write-Info 'Fetching ICS advisories seed data (ODbL v1.0)' +Download-File "$base/$master" (Join-Path $Destination $master) +Download-File "$base/$snapshot" (Join-Path $Destination $snapshot) + +$medicalUrl = 'https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx' +$medicalFile = 'ICSMA_CSV_4-20-2023.xlsx' +Write-Info 'Fetching community ICSMA snapshot' +try { + Download-File $medicalUrl (Join-Path $Destination $medicalFile) +} +catch { + Write-ErrorLine "Unable to download $medicalFile (optional): $_" + Remove-Item (Join-Path $Destination $medicalFile) -ErrorAction SilentlyContinue +} + +Write-Info "Seed data ready in $Destination" +Write-Info 'Remember: data is licensed under ODbL v1.0 (see seed README).' diff --git a/scripts/fetch-ics-cisa-seed.sh b/scripts/fetch-ics-cisa-seed.sh new file mode 100644 index 00000000..7cf7150f --- /dev/null +++ b/scripts/fetch-ics-cisa-seed.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +DEST_DIR="${1:-$ROOT_DIR/seed-data/ics-cisa}" +mkdir -p "$DEST_DIR" + +info() { printf "[ics-seed] %s\n" "$*"; } +error() { printf "[ics-seed][error] %s\n" "$*" >&2; } + +download() { + local url="$1" + local target="$2" + info "Downloading $(basename "$target")" + curl -fL "$url" -o "$target" + sha256sum "$target" > "$target.sha256" +} + +BASE="https://raw.githubusercontent.com/icsadvprj/ICS-Advisory-Project/main/ICS-CERT_ADV" +MASTER_FILE="CISA_ICS_ADV_Master.csv" +SNAPSHOT_2025="CISA_ICS_ADV_2025_10_09.csv" + +info "Fetching ICS advisories seed data (ODbL v1.0)" +download "$BASE/$MASTER_FILE" "$DEST_DIR/$MASTER_FILE" +download "$BASE/$SNAPSHOT_2025" "$DEST_DIR/$SNAPSHOT_2025" + +MEDICAL_URL="https://raw.githubusercontent.com/batarr22/ICSMA_CSV/main/ICSMA_CSV_4-20-2023.xlsx" +MEDICAL_FILE="ICSMA_CSV_4-20-2023.xlsx" +info "Fetching community ICSMA snapshot" +if curl -fL "$MEDICAL_URL" -o "$DEST_DIR/$MEDICAL_FILE"; then + sha256sum "$DEST_DIR/$MEDICAL_FILE" > "$DEST_DIR/$MEDICAL_FILE.sha256" +else + error "Unable to download $MEDICAL_FILE (optional)." + rm -f "$DEST_DIR/$MEDICAL_FILE" +fi + +info "Seed data ready in $DEST_DIR" +info "Remember: data is licensed under ODbL v1.0 (see seed README)." 
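+
+# Usage:
+#   ./scripts/fetch-ics-cisa-seed.sh                 # downloads into seed-data/ics-cisa/ (default)
+#   ./scripts/fetch-ics-cisa-seed.sh /tmp/ics-seed   # first argument overrides the destination directory
+# Each file is stored next to a matching .sha256 digest for later integrity checks.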
diff --git a/seed-data/cve/2025-10-15/CVE-2024-0001.json b/seed-data/cve/2025-10-15/CVE-2024-0001.json new file mode 100644 index 00000000..b9b89bfc --- /dev/null +++ b/seed-data/cve/2025-10-15/CVE-2024-0001.json @@ -0,0 +1,72 @@ +{ + "dataType": "CVE_RECORD", + "dataVersion": "5.0", + "cveMetadata": { + "cveId": "CVE-2024-0001", + "assignerShortName": "ExampleOrg", + "state": "PUBLISHED", + "dateReserved": "2024-01-01T00:00:00Z", + "datePublished": "2024-09-10T12:00:00Z", + "dateUpdated": "2024-09-15T12:00:00Z" + }, + "containers": { + "cna": { + "title": "Example Product Remote Code Execution", + "descriptions": [ + { + "lang": "en", + "value": "An example vulnerability allowing remote attackers to execute arbitrary code." + } + ], + "affected": [ + { + "vendor": "ExampleVendor", + "product": "ExampleProduct", + "platform": "linux", + "defaultStatus": "affected", + "versions": [ + { + "status": "affected", + "version": "1.0.0", + "lessThan": "1.2.0", + "versionType": "semver" + }, + { + "status": "unaffected", + "version": "1.2.0", + "versionType": "semver" + } + ] + } + ], + "references": [ + { + "url": "https://example.com/security/advisory", + "name": "Vendor Advisory", + "tags": [ + "vendor-advisory" + ] + }, + { + "url": "https://cve.example.com/CVE-2024-0001", + "tags": [ + "third-party-advisory" + ] + } + ], + "metrics": [ + { + "cvssV3_1": { + "version": "3.1", + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "baseScore": 9.8, + "baseSeverity": "CRITICAL" + } + } + ], + "aliases": [ + "GHSA-xxxx-yyyy-zzzz" + ] + } + } +} diff --git a/seed-data/cve/2025-10-15/CVE-2024-4567.json b/seed-data/cve/2025-10-15/CVE-2024-4567.json new file mode 100644 index 00000000..65805afa --- /dev/null +++ b/seed-data/cve/2025-10-15/CVE-2024-4567.json @@ -0,0 +1,147 @@ +{ + "dataType": "CVE_RECORD", + "dataVersion": "5.1", + "cveMetadata": { + "cveId": "CVE-2024-4567", + "assignerOrgId": "b15e7b5b-3da4-40ae-a43c-f7aa60e62599", + "state": "PUBLISHED", + "assignerShortName": "Wordfence", + "dateReserved": "2024-05-06T19:34:14.071Z", + "datePublished": "2024-05-09T20:03:38.213Z", + "dateUpdated": "2024-08-01T20:47:40.724Z" + }, + "containers": { + "cna": { + "providerMetadata": { + "orgId": "b15e7b5b-3da4-40ae-a43c-f7aa60e62599", + "shortName": "Wordfence", + "dateUpdated": "2024-05-09T20:03:38.213Z" + }, + "affected": [ + { + "vendor": "themifyme", + "product": "Themify Shortcodes", + "versions": [ + { + "version": "*", + "status": "affected", + "lessThanOrEqual": "2.0.9", + "versionType": "semver" + } + ], + "defaultStatus": "unaffected" + } + ], + "descriptions": [ + { + "lang": "en", + "value": "The Themify Shortcodes plugin for WordPress is vulnerable to Stored Cross-Site Scripting via the plugin's themify_button shortcode in all versions up to, and including, 2.0.9 due to insufficient input sanitization and output escaping on user supplied attributes. This makes it possible for authenticated attackers, with contributor-level access and above, to inject arbitrary web scripts in pages that will execute whenever a user accesses an injected page." 
+ } + ], + "title": "Themify Shortcodes <= 2.0.9 - Authenticated (Contributor+) Stored Cross-Site Scripting via themify_button Shortcode", + "references": [ + { + "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/c63ff9d7-6a14-4186-8550-4e5c50855e7f?source=cve" + }, + { + "url": "https://plugins.trac.wordpress.org/changeset/3082885/themify-shortcodes" + } + ], + "problemTypes": [ + { + "descriptions": [ + { + "lang": "en", + "description": "CWE-79 Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')" + } + ] + } + ], + "metrics": [ + { + "cvssV3_1": { + "version": "3.1", + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:C/C:L/I:L/A:N", + "baseScore": 6.4, + "baseSeverity": "MEDIUM" + } + } + ], + "credits": [ + { + "lang": "en", + "type": "finder", + "value": "Francesco Carlucci" + } + ], + "timeline": [ + { + "time": "2024-05-06T00:00:00.000+00:00", + "lang": "en", + "value": "Vendor Notified" + }, + { + "time": "2024-05-08T00:00:00.000+00:00", + "lang": "en", + "value": "Disclosed" + } + ] + }, + "adp": [ + { + "title": "CISA ADP Vulnrichment", + "metrics": [ + { + "other": { + "type": "ssvc", + "content": { + "id": "CVE-2024-4567", + "role": "CISA Coordinator", + "options": [ + { + "Exploitation": "none" + }, + { + "Automatable": "no" + }, + { + "Technical Impact": "partial" + } + ], + "version": "2.0.3", + "timestamp": "2024-05-11T16:56:12.695905Z" + } + } + } + ], + "providerMetadata": { + "orgId": "134c704f-9b21-4f2e-91b3-4a467353bcc0", + "shortName": "CISA-ADP", + "dateUpdated": "2024-06-04T17:54:44.162Z" + } + }, + { + "providerMetadata": { + "orgId": "af854a3a-2127-422b-91ae-364da2661108", + "shortName": "CVE", + "dateUpdated": "2024-08-01T20:47:40.724Z" + }, + "title": "CVE Program Container", + "references": [ + { + "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/c63ff9d7-6a14-4186-8550-4e5c50855e7f?source=cve", + "tags": [ + "x_transferred" + ] + }, + { + "url": "https://plugins.trac.wordpress.org/changeset/3082885/themify-shortcodes", + "tags": [ + "x_transferred" + ] + } + ] + } + ] + } +} diff --git a/seed-data/ics-cisa/README.md b/seed-data/ics-cisa/README.md new file mode 100644 index 00000000..1c314a11 --- /dev/null +++ b/seed-data/ics-cisa/README.md @@ -0,0 +1,19 @@ +# CISA ICS Advisory Seed Data + +This directory is reserved for **seed data** sourced from the community-maintained [ICS Advisory Project](https://github.com/icsadvprj/ICS-Advisory-Project). The project republishes CISA ICS advisories under the **Open Database License (ODbL) v1.0**. StellaOps uses these CSV snapshots to bootstrap offline environments before the official GovDelivery credentials arrive. + +> ⚠️ **Licence notice** – By downloading and using the CSV files you agree to the ODbL requirements (attribution, share-alike, and notice preservation). See [`LICENSE-ODBL.md`](https://github.com/icsadvprj/ICS-Advisory-Project/blob/main/LICENSE.md) for the full text. + +## Usage + +1. Run `scripts/fetch-ics-cisa-seed.sh` (or the PowerShell variant) to download the latest snapshots into this directory. +2. The files are ignored by Git to avoid committing third-party data; include them explicitly when building an Offline Update Kit. +3. When you later switch to live GovDelivery ingestion, keep the CSVs around as historical fixtures—do **not** treat them as an authoritative source once the live connector is enabled. 
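+
+Before packaging, the downloads can be checked against the generated digests. This is a minimal sketch that assumes the `.sha256` files were written by `scripts/fetch-ics-cisa-seed.sh` on the host where the files still live (the script records absolute paths):
+
+```bash
+# Verify each seed artefact against its recorded SHA-256 digest
+sha256sum -c seed-data/ics-cisa/*.sha256
+```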
+ +### Suggested Artefacts + +- `CISA_ICS_ADV_Master.csv` – cumulative advisory dataset (2010 → present) +- `CISA_ICS_ADV_.csv` – point-in-time snapshots +- `ICSMA_CSV_.xlsx` – medical device advisories (optional, sourced from the community mirror) + +Keep the generated SHA-256 files alongside the CSVs so Offline Kit packaging can verify integrity. diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs index 125d2c17..a0fb6f8d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs @@ -15,7 +15,8 @@ public class StandardClientProvisioningStoreTests public async Task CreateOrUpdateAsync_HashesSecretAndPersistsDocument() { var store = new TrackingClientStore(); - var provisioning = new StandardClientProvisioningStore("standard", store); + var revocations = new TrackingRevocationStore(); + var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); var registration = new AuthorityClientRegistration( clientId: "bootstrap-client", @@ -63,4 +64,21 @@ public class StandardClientProvisioningStoreTests return ValueTask.FromResult(removed); } } + + private sealed class TrackingRevocationStore : IAuthorityRevocationStore + { + public List Upserts { get; } = new(); + + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken) + { + Upserts.Add(document); + return ValueTask.CompletedTask; + } + + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken) + => ValueTask.FromResult(true); + + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + => ValueTask.FromResult>(Array.Empty()); + } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs index ef7c0f9e..5c95104c 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs @@ -5,6 +5,7 @@ using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; using Microsoft.Extensions.Hosting; using Microsoft.Extensions.Options; using Mongo2Go; @@ -58,6 +59,21 @@ public class StandardPluginRegistrarTests services.AddLogging(); services.AddSingleton(database); services.AddSingleton(new InMemoryClientStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new 
StubRevocationStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(new StubRevocationStore()); var registrar = new StandardPluginRegistrar(); registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); @@ -83,6 +99,53 @@ public class StandardPluginRegistrarTests Assert.True(verification.User?.RequiresPasswordReset); } + [Fact] + public void Register_LogsWarning_WhenPasswordPolicyWeaker() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-password-policy"); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["passwordPolicy:minimumLength"] = "6", + ["passwordPolicy:requireUppercase"] = "false", + ["passwordPolicy:requireLowercase"] = "false", + ["passwordPolicy:requireDigit"] = "false", + ["passwordPolicy:requireSymbol"] = "false" + }) + .Build(); + + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + new[] { AuthorityPluginCapabilities.Password }, + new Dictionary(), + "standard.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + var loggerProvider = new CapturingLoggerProvider(); + services.AddLogging(builder => builder.AddProvider(loggerProvider)); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + _ = provider.GetRequiredService(); + + Assert.Contains(loggerProvider.Entries, entry => + entry.Level == LogLevel.Warning && + entry.Category.Contains(typeof(StandardPluginRegistrar).FullName!, StringComparison.Ordinal) && + entry.Message.Contains("weaker password policy", StringComparison.OrdinalIgnoreCase)); + } + [Fact] public void Register_ForcesPasswordCapability_WhenManifestMissing() { @@ -106,6 +169,8 @@ public class StandardPluginRegistrarTests services.AddLogging(); services.AddSingleton(database); services.AddSingleton(new InMemoryClientStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); var registrar = new StandardPluginRegistrar(); registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); @@ -209,6 +274,61 @@ public class StandardPluginRegistrarTests } } +internal sealed record CapturedLogEntry(string Category, LogLevel Level, string Message); + +internal sealed class CapturingLoggerProvider : ILoggerProvider +{ + public List Entries { get; } = new(); + + public ILogger CreateLogger(string categoryName) => new CapturingLogger(categoryName, Entries); + + public void Dispose() + { + } + + private sealed class CapturingLogger : ILogger + { + private readonly string category; + private readonly List entries; + + public CapturingLogger(string category, List entries) + { + this.category = category; + this.entries = entries; + } + + public IDisposable BeginScope(TState state) where TState : notnull => NullScope.Instance; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) + { + entries.Add(new CapturedLogEntry(category, logLevel, formatter(state, exception))); + } + + private sealed class NullScope : IDisposable + { + public static readonly NullScope Instance = new(); + + public void Dispose() + { + } + } + } +} + +internal sealed class StubRevocationStore : IAuthorityRevocationStore +{ + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken) + => ValueTask.FromResult(false); + + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + => ValueTask.FromResult>(Array.Empty()); +} + internal sealed class InMemoryClientStore : IAuthorityClientStore { private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs index 46122d7c..86cec8dd 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs @@ -71,6 +71,41 @@ internal sealed class PasswordPolicyOptions throw new InvalidOperationException($"Standard plugin '{pluginName}' requires passwordPolicy.minimumLength to be greater than zero."); } } + + public bool IsWeakerThan(PasswordPolicyOptions other) + { + if (other is null) + { + return false; + } + + if (MinimumLength < other.MinimumLength) + { + return true; + } + + if (!RequireUppercase && other.RequireUppercase) + { + return true; + } + + if (!RequireLowercase && other.RequireLowercase) + { + return true; + } + + if (!RequireDigit && other.RequireDigit) + { + return true; + } + + if (!RequireSymbol && other.RequireSymbol) + { + return true; + } + + return false; + } } internal sealed class LockoutOptions diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs index f0595a3c..d7857413 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs @@ -51,6 +51,25 @@ internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar var cryptoProvider = sp.GetRequiredService(); var passwordHasher = new CryptoPasswordHasher(pluginOptions, cryptoProvider); var loggerFactory = sp.GetRequiredService(); + var registrarLogger = loggerFactory.CreateLogger(); + + var baselinePolicy = new PasswordPolicyOptions(); + if (pluginOptions.PasswordPolicy.IsWeakerThan(baselinePolicy)) + { + registrarLogger.LogWarning( + "Standard plugin '{Plugin}' configured a weaker password policy (minLength={Length}, requireUpper={Upper}, requireLower={Lower}, requireDigit={Digit}, requireSymbol={Symbol}) than the baseline (minLength={BaseLength}, requireUpper={BaseUpper}, requireLower={BaseLower}, requireDigit={BaseDigit}, requireSymbol={BaseSymbol}).", + pluginName, + pluginOptions.PasswordPolicy.MinimumLength, + pluginOptions.PasswordPolicy.RequireUppercase, + pluginOptions.PasswordPolicy.RequireLowercase, + pluginOptions.PasswordPolicy.RequireDigit, + pluginOptions.PasswordPolicy.RequireSymbol, + baselinePolicy.MinimumLength, + 
baselinePolicy.RequireUppercase, + baselinePolicy.RequireLowercase, + baselinePolicy.RequireDigit, + baselinePolicy.RequireSymbol); + } return new StandardUserCredentialStore( pluginName, diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md index 44f0cb79..6f7190fa 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md +++ b/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md @@ -5,12 +5,14 @@ | PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. | | SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. | | SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. | -| SEC2.PLG | TODO | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. | -| SEC3.PLG | TODO | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. | +| SEC2.PLG | DOING (2025-10-14) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. | +| SEC3.PLG | DOING (2025-10-14) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after). | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. | | SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. | -| SEC5.PLG | TODO | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. | +| SEC5.PLG | DOING (2025-10-14) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. 
| | PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles.
⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. | | PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. | | PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. | > Update statuses to DOING/DONE/BLOCKED as you make progress. Always run `dotnet test` for touched projects before marking DONE. + +> Remark (2025-10-13, PLG6.DOC/PLG6.DIAGRAM): Security Guild delivered `docs/security/rate-limits.md`; Docs team can lift Section 3 (tuning table + alerts) into the developer guide diagrams when rendering assets. diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs index c43a0792..311e52c2 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs @@ -22,5 +22,6 @@ public static class AuthorityMongoDefaults public const string LoginAttempts = "authority_login_attempts"; public const string Revocations = "authority_revocations"; public const string RevocationState = "authority_revocation_state"; + public const string Invites = "authority_bootstrap_invites"; } } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs new file mode 100644 index 00000000..2d9974f4 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Authority.Storage.Mongo.Documents; + +/// +/// Represents a bootstrap invitation token for provisioning users or clients. +/// +[BsonIgnoreExtraElements] +public sealed class AuthorityBootstrapInviteDocument +{ + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); + + [BsonElement("token")] + public string Token { get; set; } = Guid.NewGuid().ToString("N"); + + [BsonElement("type")] + public string Type { get; set; } = "user"; + + [BsonElement("provider")] + [BsonIgnoreIfNull] + public string? Provider { get; set; } + + [BsonElement("target")] + [BsonIgnoreIfNull] + public string? Target { get; set; } + + [BsonElement("issuedAt")] + public DateTimeOffset IssuedAt { get; set; } = DateTimeOffset.UtcNow; + + [BsonElement("issuedBy")] + [BsonIgnoreIfNull] + public string? IssuedBy { get; set; } + + [BsonElement("expiresAt")] + public DateTimeOffset ExpiresAt { get; set; } = DateTimeOffset.UtcNow.AddDays(2); + + [BsonElement("status")] + public string Status { get; set; } = AuthorityBootstrapInviteStatuses.Pending; + + [BsonElement("reservedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ReservedAt { get; set; } + + [BsonElement("reservedBy")] + [BsonIgnoreIfNull] + public string? 
ReservedBy { get; set; } + + [BsonElement("consumedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ConsumedAt { get; set; } + + [BsonElement("consumedBy")] + [BsonIgnoreIfNull] + public string? ConsumedBy { get; set; } + + [BsonElement("metadata")] + [BsonIgnoreIfNull] + public Dictionary? Metadata { get; set; } +} + +public static class AuthorityBootstrapInviteStatuses +{ + public const string Pending = "pending"; + public const string Reserved = "reserved"; + public const string Consumed = "consumed"; + public const string Expired = "expired"; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs index de05127f..fd5156b5 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; using MongoDB.Bson; using MongoDB.Bson.Serialization.Attributes; @@ -61,6 +62,11 @@ public sealed class AuthorityTokenDocument [BsonIgnoreIfNull] public string? RevokedReasonDescription { get; set; } + + [BsonElement("devices")] + [BsonIgnoreIfNull] + public List? Devices { get; set; } + [BsonElement("revokedMetadata")] [BsonIgnoreIfNull] public Dictionary? RevokedMetadata { get; set; } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs index 8856e5e9..9b48024d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs @@ -98,12 +98,19 @@ public static class ServiceCollectionExtensions return database.GetCollection(AuthorityMongoDefaults.Collections.RevocationState); }); + services.AddSingleton(static sp => + { + var database = sp.GetRequiredService(); + return database.GetCollection(AuthorityMongoDefaults.Collections.Invites); + }); + services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); + services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); @@ -112,6 +119,7 @@ public static class ServiceCollectionExtensions services.TryAddSingleton(); services.TryAddSingleton(); services.TryAddSingleton(); + services.TryAddSingleton(); return services; } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs new file mode 100644 index 00000000..4aea6696 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs @@ -0,0 +1,25 @@ +using MongoDB.Driver; +using StellaOps.Authority.Storage.Mongo.Documents; + +namespace StellaOps.Authority.Storage.Mongo.Initialization; + +internal sealed class AuthorityBootstrapInviteCollectionInitializer : IAuthorityCollectionInitializer +{ + private static readonly CreateIndexModel[] Indexes = + { + new CreateIndexModel( + Builders.IndexKeys.Ascending(i => i.Token), + new CreateIndexOptions { 
Unique = true, Name = "idx_invite_token" }), + new CreateIndexModel( + Builders.IndexKeys.Ascending(i => i.Status).Ascending(i => i.ExpiresAt), + new CreateIndexOptions { Name = "idx_invite_status_expires" }) + }; + + public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + var collection = database.GetCollection(AuthorityMongoDefaults.Collections.Invites); + await collection.Indexes.CreateManyAsync(Indexes, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs new file mode 100644 index 00000000..48c0629f --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs @@ -0,0 +1,166 @@ +using System; +using System.Collections.Generic; +using MongoDB.Driver; +using StellaOps.Authority.Storage.Mongo.Documents; + +namespace StellaOps.Authority.Storage.Mongo.Stores; + +internal sealed class AuthorityBootstrapInviteStore : IAuthorityBootstrapInviteStore +{ + private readonly IMongoCollection collection; + + public AuthorityBootstrapInviteStore(IMongoCollection collection) + => this.collection = collection ?? throw new ArgumentNullException(nameof(collection)); + + public async ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + return document; + } + + public async ValueTask TryReserveAsync( + string token, + string expectedType, + DateTimeOffset now, + string? 
reservedBy, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(token)) + { + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null); + } + + var normalizedToken = token.Trim(); + var filter = Builders.Filter.And( + Builders.Filter.Eq(i => i.Token, normalizedToken), + Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Pending)); + + var update = Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved) + .Set(i => i.ReservedAt, now) + .Set(i => i.ReservedBy, reservedBy); + + var options = new FindOneAndUpdateOptions + { + ReturnDocument = ReturnDocument.After + }; + + var invite = await collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false); + + if (invite is null) + { + var existing = await collection + .Find(i => i.Token == normalizedToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (existing is null) + { + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null); + } + + if (existing.Status is AuthorityBootstrapInviteStatuses.Consumed or AuthorityBootstrapInviteStatuses.Reserved) + { + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.AlreadyUsed, existing); + } + + if (existing.Status == AuthorityBootstrapInviteStatuses.Expired || existing.ExpiresAt <= now) + { + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Expired, existing); + } + + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, existing); + } + + if (!string.Equals(invite.Type, expectedType, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseAsync(normalizedToken, cancellationToken).ConfigureAwait(false); + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, invite); + } + + if (invite.ExpiresAt <= now) + { + await MarkExpiredAsync(normalizedToken, cancellationToken).ConfigureAwait(false); + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Expired, invite); + } + + return new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.Reserved, invite); + } + + public async ValueTask ReleaseAsync(string token, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(token)) + { + return false; + } + + var result = await collection.UpdateOneAsync( + Builders.Filter.And( + Builders.Filter.Eq(i => i.Token, token.Trim()), + Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)), + Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Pending) + .Set(i => i.ReservedAt, null) + .Set(i => i.ReservedBy, null), + cancellationToken: cancellationToken).ConfigureAwait(false); + + return result.ModifiedCount > 0; + } + + public async ValueTask MarkConsumedAsync(string token, string? 
consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(token)) + { + return false; + } + + var result = await collection.UpdateOneAsync( + Builders.Filter.And( + Builders.Filter.Eq(i => i.Token, token.Trim()), + Builders.Filter.Eq(i => i.Status, AuthorityBootstrapInviteStatuses.Reserved)), + Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Consumed) + .Set(i => i.ConsumedAt, consumedAt) + .Set(i => i.ConsumedBy, consumedBy), + cancellationToken: cancellationToken).ConfigureAwait(false); + + return result.ModifiedCount > 0; + } + + public async ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken) + { + var filter = Builders.Filter.And( + Builders.Filter.Lte(i => i.ExpiresAt, now), + Builders.Filter.In( + i => i.Status, + new[] { AuthorityBootstrapInviteStatuses.Pending, AuthorityBootstrapInviteStatuses.Reserved })); + + var update = Builders.Update + .Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired) + .Set(i => i.ReservedAt, null) + .Set(i => i.ReservedBy, null); + + var expired = await collection.Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + if (expired.Count == 0) + { + return Array.Empty(); + } + + await collection.UpdateManyAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + + return expired; + } + + private async Task MarkExpiredAsync(string token, CancellationToken cancellationToken) + { + await collection.UpdateOneAsync( + Builders.Filter.Eq(i => i.Token, token), + Builders.Update.Set(i => i.Status, AuthorityBootstrapInviteStatuses.Expired), + cancellationToken: cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs index c74a1ea6..da2c4477 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs @@ -1,6 +1,10 @@ +using System; using System.Collections.Generic; using Microsoft.Extensions.Logging; +using MongoDB.Bson; using MongoDB.Driver; +using System.Linq; +using System.Globalization; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -86,6 +90,86 @@ internal sealed class AuthorityTokenStore : IAuthorityTokenStore logger.LogDebug("Updated token {TokenId} status to {Status} (matched {Matched}).", tokenId, status, result.MatchedCount); } + + public async ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(tokenId)) + { + return new TokenUsageUpdateResult(TokenUsageUpdateStatus.NotFound, null, null); + } + + if (string.IsNullOrWhiteSpace(remoteAddress) && string.IsNullOrWhiteSpace(userAgent)) + { + return new TokenUsageUpdateResult(TokenUsageUpdateStatus.MissingMetadata, remoteAddress, userAgent); + } + + var id = tokenId.Trim(); + var token = await collection + .Find(t => t.TokenId == id) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (token is null) + { + return new TokenUsageUpdateResult(TokenUsageUpdateStatus.NotFound, remoteAddress, userAgent); + } + + token.Devices ??= new List(); + + string? normalizedAddress = string.IsNullOrWhiteSpace(remoteAddress) ? 
null : remoteAddress.Trim(); + string? normalizedAgent = string.IsNullOrWhiteSpace(userAgent) ? null : userAgent.Trim(); + + var device = token.Devices.FirstOrDefault(d => + string.Equals(GetString(d, "remoteAddress"), normalizedAddress, StringComparison.OrdinalIgnoreCase) && + string.Equals(GetString(d, "userAgent"), normalizedAgent, StringComparison.Ordinal)); + var suspicious = false; + + if (device is null) + { + suspicious = token.Devices.Count > 0; + var document = new BsonDocument + { + { "remoteAddress", normalizedAddress }, + { "userAgent", normalizedAgent }, + { "firstSeen", BsonDateTime.Create(observedAt.UtcDateTime) }, + { "lastSeen", BsonDateTime.Create(observedAt.UtcDateTime) }, + { "useCount", 1 } + }; + + token.Devices.Add(document); + } + else + { + device["lastSeen"] = BsonDateTime.Create(observedAt.UtcDateTime); + device["useCount"] = device.TryGetValue("useCount", out var existingCount) && existingCount.IsInt32 + ? existingCount.AsInt32 + 1 + : 1; + } + + var update = Builders.Update.Set(t => t.Devices, token.Devices); + await collection.UpdateOneAsync( + Builders.Filter.Eq(t => t.TokenId, id), + update, + cancellationToken: cancellationToken).ConfigureAwait(false); + + return new TokenUsageUpdateResult(suspicious ? TokenUsageUpdateStatus.SuspectedReplay : TokenUsageUpdateStatus.Recorded, normalizedAddress, normalizedAgent); + } + + private static string? GetString(BsonDocument document, string name) + { + if (!document.TryGetValue(name, out var value)) + { + return null; + } + + return value switch + { + { IsString: true } => value.AsString, + { IsBsonNull: true } => null, + _ => value.ToString() + }; + } + public async ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken) { var filter = Builders.Filter.And( diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs new file mode 100644 index 00000000..c0a51bc5 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs @@ -0,0 +1,26 @@ +using StellaOps.Authority.Storage.Mongo.Documents; + +namespace StellaOps.Authority.Storage.Mongo.Stores; + +public interface IAuthorityBootstrapInviteStore +{ + ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken); + + ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken); + + ValueTask ReleaseAsync(string token, CancellationToken cancellationToken); + + ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken); + + ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken); +} + +public enum BootstrapInviteReservationStatus +{ + Reserved, + NotFound, + Expired, + AlreadyUsed +} + +public sealed record BootstrapInviteReservationResult(BootstrapInviteReservationStatus Status, AuthorityBootstrapInviteDocument? 
Invite); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs index fc576bd9..f4bb918a 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs @@ -1,3 +1,5 @@ +using System; +using System.Collections.Generic; using StellaOps.Authority.Storage.Mongo.Documents; namespace StellaOps.Authority.Storage.Mongo.Stores; @@ -21,5 +23,17 @@ public interface IAuthorityTokenStore ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken); + ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken); + ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken); } + +public enum TokenUsageUpdateStatus +{ + Recorded, + SuspectedReplay, + MissingMetadata, + NotFound +} + +public sealed record TokenUsageUpdateResult(TokenUsageUpdateStatus Status, string? RemoteAddress, string? UserAgent); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs new file mode 100644 index 00000000..b707b251 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using StellaOps.Authority.Bootstrap; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; +using Xunit; + +namespace StellaOps.Authority.Tests.Bootstrap; + +public sealed class BootstrapInviteCleanupServiceTests +{ + [Fact] + public async Task SweepExpiredInvitesAsync_ExpiresInvitesAndEmitsAuditRecords() + { + var now = new DateTimeOffset(2025, 10, 14, 12, 0, 0, TimeSpan.Zero); + var timeProvider = new FakeTimeProvider(now); + + var invites = new List + { + new() + { + Token = "token-1", + Type = BootstrapInviteTypes.User, + ExpiresAt = now.AddMinutes(-5), + Provider = "standard", + Target = "alice@example.com", + Status = AuthorityBootstrapInviteStatuses.Pending + }, + new() + { + Token = "token-2", + Type = BootstrapInviteTypes.Client, + ExpiresAt = now.AddMinutes(-1), + Provider = "standard", + Target = "client-1", + Status = AuthorityBootstrapInviteStatuses.Reserved + } + }; + + var store = new FakeInviteStore(invites); + var sink = new CapturingAuthEventSink(); + var service = new BootstrapInviteCleanupService(store, sink, timeProvider, NullLogger.Instance); + + await service.SweepExpiredInvitesAsync(CancellationToken.None); + + Assert.True(store.ExpireCalled); + Assert.Equal(2, sink.Events.Count); + Assert.All(sink.Events, record => Assert.Equal("authority.bootstrap.invite.expired", record.EventType)); + Assert.Contains(sink.Events, record => record.Properties.Any(property => property.Name == "invite.token" && property.Value.Value == "token-1")); + Assert.Contains(sink.Events, record => record.Properties.Any(property => property.Name == "invite.token" && property.Value.Value == "token-2")); + } + + 
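// Illustrative sketch (not part of the patch): how a bootstrap endpoint might drive the
// IAuthorityBootstrapInviteStore contract introduced above (reserve -> provision -> consume,
// releasing on failure). The helper name and surrounding wiring are assumptions; the store
// methods, result types, and BootstrapInviteTypes match this diff. Assumes using System,
// System.Threading, and System.Threading.Tasks.
internal static class BootstrapInviteFlowSketch
{
    public static async Task<bool> TryConsumeInviteAsync(
        IAuthorityBootstrapInviteStore invites,
        string token,
        string invitedBy,
        TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        var now = timeProvider.GetUtcNow();

        // Reserve first so concurrent bootstrap requests cannot consume the same invite.
        var reservation = await invites.TryReserveAsync(token, BootstrapInviteTypes.User, now, invitedBy, cancellationToken);
        if (reservation.Status != BootstrapInviteReservationStatus.Reserved)
        {
            return false; // NotFound, Expired, or AlreadyUsed – caller maps this to a client error.
        }

        try
        {
            // ... provision the user/client here ...
            return await invites.MarkConsumedAsync(token, invitedBy, timeProvider.GetUtcNow(), cancellationToken);
        }
        catch
        {
            // Put the invite back so it can be retried before expiry if provisioning failed.
            await invites.ReleaseAsync(token, cancellationToken);
            throw;
        }
    }
}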
private sealed class FakeInviteStore : IAuthorityBootstrapInviteStore + { + private readonly IReadOnlyList invites; + + public FakeInviteStore(IReadOnlyList invites) + => this.invites = invites; + + public bool ExpireCalled { get; private set; } + + public ValueTask CreateAsync(AuthorityBootstrapInviteDocument document, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask TryReserveAsync(string token, string expectedType, DateTimeOffset now, string? reservedBy, CancellationToken cancellationToken) + => ValueTask.FromResult(new BootstrapInviteReservationResult(BootstrapInviteReservationStatus.NotFound, null)); + + public ValueTask ReleaseAsync(string token, CancellationToken cancellationToken) + => ValueTask.FromResult(false); + + public ValueTask MarkConsumedAsync(string token, string? consumedBy, DateTimeOffset consumedAt, CancellationToken cancellationToken) + => ValueTask.FromResult(false); + + public ValueTask> ExpireAsync(DateTimeOffset now, CancellationToken cancellationToken) + { + ExpireCalled = true; + return ValueTask.FromResult(invites); + } + } + + private sealed class CapturingAuthEventSink : IAuthEventSink + { + public List Events { get; } = new(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return ValueTask.CompletedTask; + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs index 086c5b54..25b3e059 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs @@ -17,6 +17,7 @@ using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.RateLimiting; using StellaOps.Cryptography.Audit; using Xunit; +using MongoDB.Bson; using static StellaOps.Authority.Tests.OpenIddict.TestHelpers; namespace StellaOps.Authority.Tests.OpenIddict; @@ -76,7 +77,7 @@ public class ClientCredentialsHandlersTests await handler.HandleAsync(context); - Assert.False(context.IsRejected); + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]); var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); @@ -84,6 +85,36 @@ public class ClientCredentialsHandlersTests Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]); } + [Fact] + public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var sink = new TestAuthEventSink(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + sink, + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + 
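// Illustrative sketch (not part of the patch): the tamper tests below depend on
// TokenRequestTamperInspector, whose implementation is not shown in this diff. A minimal
// version could compare incoming parameter names against an allow-list; the allow-list here
// is an assumption, not the authoritative set used by Authority. Assumes using System,
// System.Collections.Generic, and System.Linq.
internal static class TokenRequestTamperInspectorSketch
{
    private static readonly HashSet<string> AllowedClientCredentialsParameters = new(StringComparer.Ordinal)
    {
        "grant_type", "client_id", "client_secret", "scope", "provider"
    };

    // Returns the parameter names that should trigger an authority.token.tamper audit event.
    public static IReadOnlyList<string> GetUnexpectedParameters(IEnumerable<string> parameterNames)
        => parameterNames
            .Where(name => !AllowedClientCredentialsParameters.Contains(name))
            .OrderBy(name => name, StringComparer.Ordinal)
            .ToArray();
}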
transaction.Request?.SetParameter("unexpected_param", "value"); + + await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); + } + [Fact] public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims() { @@ -98,22 +129,30 @@ public class ClientCredentialsHandlersTests var tokenStore = new TestTokenStore(); var authSink = new TestAuthEventSink(); var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var validateHandler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + authSink, + metadataAccessor, + TimeProvider.System, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger"); + transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30); + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + var handler = new HandleClientCredentialsHandler( registry, tokenStore, TimeProvider.System, TestActivitySource, - authSink, - metadataAccessor, NullLogger.Instance); var persistHandler = new PersistTokensHandler(tokenStore, TimeProvider.System, TestActivitySource, NullLogger.Instance); - var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger"); - transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30); - transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; - transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty] = clientDocument.Plugin!; - transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = new[] { "jobs:trigger" }; - var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); await handler.HandleAsync(context); @@ -161,10 +200,14 @@ public class TokenValidationHandlersTests ClientId = "feedser" }; + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); var handler = new ValidateAccessTokenHandler( tokenStore, new TestClientStore(CreateClient()), CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())), + metadataAccessor, + auditSink, TimeProvider.System, TestActivitySource, NullLogger.Instance); @@ -203,10 +246,14 @@ public class TokenValidationHandlersTests var registry = new AuthorityIdentityProviderRegistry(new[] { plugin }, NullLogger.Instance); + var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); + var auditSinkSuccess = new TestAuthEventSink(); var handler = new ValidateAccessTokenHandler( new TestTokenStore(), new TestClientStore(clientDocument), registry, + metadataAccessorSuccess, + auditSinkSuccess, TimeProvider.System, TestActivitySource, NullLogger.Instance); @@ -229,6 +276,76 @@ public class TokenValidationHandlersTests Assert.False(context.IsRejected); Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true"); } + + [Fact] + public async Task 
ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay() + { + var tokenStore = new TestTokenStore(); + tokenStore.Inserted = new AuthorityTokenDocument + { + TokenId = "token-replay", + Status = "valid", + ClientId = "agent", + Devices = new List + { + new BsonDocument + { + { "remoteAddress", "10.0.0.1" }, + { "userAgent", "agent/1.0" }, + { "firstSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) }, + { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) }, + { "useCount", 2 } + } + } + }; + + tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent); + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var metadata = metadataAccessor.GetMetadata(); + if (metadata is not null) + { + metadata.RemoteIp = "203.0.113.7"; + metadata.UserAgent = "agent/2.0"; + } + + var clientDocument = CreateClient(); + clientDocument.ClientId = "agent"; + var auditSink = new TestAuthEventSink(); + var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var handler = new ValidateAccessTokenHandler( + tokenStore, + new TestClientStore(clientDocument), + registry, + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Introspection, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal("agent", "token-replay", "standard"); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-replay" + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + var replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected"); + Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome); + Assert.NotNull(replayEvent.Network); + Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value); + Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total"); + } } internal sealed class TestClientStore : IAuthorityClientStore @@ -263,6 +380,8 @@ internal sealed class TestTokenStore : IAuthorityTokenStore { public AuthorityTokenDocument? Inserted { get; set; } + public Func? UsageCallback { get; set; } + public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken) { Inserted = document; @@ -281,6 +400,9 @@ internal sealed class TestTokenStore : IAuthorityTokenStore public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken) => ValueTask.FromResult(0L); + public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken) + => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); + public ValueTask> ListRevokedAsync(DateTimeOffset? 
issuedAfter, CancellationToken cancellationToken) => ValueTask.FromResult>(Array.Empty()); } diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs index cca234a0..b0f502c2 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs @@ -74,6 +74,26 @@ public class PasswordGrantHandlersTests Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut); } + [Fact] + public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, TimeProvider.System, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!"); + transaction.Request?.SetParameter("unexpected_param", "value"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); + } + private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store) { var plugin = new StubIdentityProviderPlugin("stub", store); @@ -104,14 +124,14 @@ public class PasswordGrantHandlersTests Name = name; Type = "stub"; var manifest = new AuthorityPluginManifest( - name, - "stub", - enabled: true, - version: null, - description: null, - capabilities: new[] { AuthorityPluginCapabilities.Password }, - configuration: new Dictionary(StringComparer.OrdinalIgnoreCase), - configPath: $"{name}.yaml"); + Name: name, + Type: "stub", + Enabled: true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: new[] { AuthorityPluginCapabilities.Password }, + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: $"{name}.yaml"); Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); Credentials = store; ClaimsEnricher = new NoopClaimsEnricher(); diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs index 76b15cde..bf7e8dc5 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs @@ -5,6 +5,7 @@ using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging.Abstractions; using Microsoft.Extensions.Time.Testing; using MongoDB.Driver; +using MongoDB.Bson; using OpenIddict.Abstractions; using OpenIddict.Server; using StellaOps.Authority; @@ -56,10 +57,10 @@ public sealed class TokenPersistenceIntegrationTests withClientProvisioning: true, 
clientDescriptor: TestHelpers.CreateDescriptor(clientDocument)); - var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, NullLogger.Instance); var authSink = new TestAuthEventSink(); var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var handleHandler = new HandleClientCredentialsHandler(registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger.Instance); + var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, NullLogger.Instance); + var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, clock, TestActivitySource, NullLogger.Instance); var persistHandler = new PersistTokensHandler(tokenStore, clock, TestActivitySource, NullLogger.Instance); var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger"); @@ -148,10 +149,14 @@ public sealed class TokenPersistenceIntegrationTests var revokedAt = now.AddMinutes(1); await tokenStore.UpdateStatusAsync(revokedTokenId, "revoked", revokedAt, "manual", null, null, CancellationToken.None); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); var handler = new ValidateAccessTokenHandler( tokenStore, clientStore, registry, + metadataAccessor, + auditSink, clock, TestActivitySource, NullLogger.Instance); @@ -190,6 +195,60 @@ public sealed class TokenPersistenceIntegrationTests Assert.Equal("manual", stored.RevokedReason); } + [Fact] + public async Task RecordUsageAsync_FlagsSuspectedReplay_OnNewDeviceFingerprint() + { + await ResetCollectionsAsync(); + + var issuedAt = new DateTimeOffset(2025, 10, 14, 8, 0, 0, TimeSpan.Zero); + var clock = new FakeTimeProvider(issuedAt); + + await using var provider = await BuildMongoProviderAsync(clock); + + var tokenStore = provider.GetRequiredService(); + + var tokenDocument = new AuthorityTokenDocument + { + TokenId = "token-replay", + Type = OpenIddictConstants.TokenTypeHints.AccessToken, + ClientId = "client-1", + Status = "valid", + CreatedAt = issuedAt, + Devices = new List + { + new BsonDocument + { + { "remoteAddress", "10.0.0.1" }, + { "userAgent", "agent/1.0" }, + { "firstSeen", BsonDateTime.Create(issuedAt.AddMinutes(-10).UtcDateTime) }, + { "lastSeen", BsonDateTime.Create(issuedAt.AddMinutes(-5).UtcDateTime) }, + { "useCount", 2 } + } + } + }; + + await tokenStore.InsertAsync(tokenDocument, CancellationToken.None); + + var result = await tokenStore.RecordUsageAsync( + "token-replay", + remoteAddress: "10.0.0.2", + userAgent: "agent/2.0", + observedAt: clock.GetUtcNow(), + CancellationToken.None); + + Assert.Equal(TokenUsageUpdateStatus.SuspectedReplay, result.Status); + + var stored = await tokenStore.FindByTokenIdAsync("token-replay", CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(2, stored!.Devices?.Count); + Assert.Contains(stored.Devices!, doc => + { + var remote = doc.TryGetValue("remoteAddress", out var ra) && ra.IsString ? ra.AsString : null; + var agentValue = doc.TryGetValue("userAgent", out var ua) && ua.IsString ? 
ua.AsString : null; + return remote == "10.0.0.2" && agentValue == "agent/2.0"; + }); + } + private async Task ResetCollectionsAsync() { var tokens = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Tokens); @@ -220,27 +279,3 @@ public sealed class TokenPersistenceIntegrationTests return provider; } } - -internal sealed class TestAuthEventSink : IAuthEventSink -{ - public List Records { get; } = new(); - - public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - Records.Add(record); - return ValueTask.CompletedTask; - } -} - -internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor -{ - private readonly AuthorityRateLimiterMetadata metadata = new(); - - public AuthorityRateLimiterMetadata? GetMetadata() => metadata; - - public void SetClientId(string? clientId) => metadata.ClientId = string.IsNullOrWhiteSpace(clientId) ? null : clientId; - - public void SetSubjectId(string? subjectId) => metadata.SubjectId = string.IsNullOrWhiteSpace(subjectId) ? null : subjectId; - - public void SetTag(string name, string? value) => metadata.SetTag(name, value); -} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs b/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs index 0a072a06..df687867 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs @@ -76,6 +76,7 @@ public class AuthorityRateLimiterMetadataMiddlewareTests context.Request.Path = "/token"; context.Request.Method = HttpMethods.Post; context.Request.Headers["X-Forwarded-For"] = "203.0.113.99"; + context.Request.Headers.UserAgent = "StellaOps-Client/1.2"; var middleware = CreateMiddleware(); await middleware.InvokeAsync(context); @@ -84,6 +85,9 @@ public class AuthorityRateLimiterMetadataMiddlewareTests Assert.NotNull(metadata); Assert.Equal("203.0.113.99", metadata!.RemoteIp); Assert.Equal("203.0.113.99", metadata.ForwardedFor); + Assert.Equal("StellaOps-Client/1.2", metadata.UserAgent); + Assert.True(metadata.Tags.TryGetValue("authority.user_agent", out var tagValue)); + Assert.Equal("StellaOps-Client/1.2", tagValue); } private static AuthorityRateLimiterMetadataMiddleware CreateMiddleware() diff --git a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs b/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs new file mode 100644 index 00000000..927aca24 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Cryptography.Audit; + +namespace StellaOps.Authority.Bootstrap; + +internal sealed class BootstrapInviteCleanupService : BackgroundService +{ + private readonly IAuthorityBootstrapInviteStore inviteStore; + private readonly IAuthEventSink auditSink; + private readonly TimeProvider timeProvider; + private readonly ILogger logger; + private readonly TimeSpan interval; + + public BootstrapInviteCleanupService( + IAuthorityBootstrapInviteStore 
inviteStore, + IAuthEventSink auditSink, + TimeProvider timeProvider, + ILogger logger) + { + this.inviteStore = inviteStore ?? throw new ArgumentNullException(nameof(inviteStore)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + interval = TimeSpan.FromMinutes(5); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var timer = new PeriodicTimer(interval); + + try + { + while (await timer.WaitForNextTickAsync(stoppingToken).ConfigureAwait(false)) + { + await SweepExpiredInvitesAsync(stoppingToken).ConfigureAwait(false); + } + } + catch (OperationCanceledException) + { + // Shutdown requested. + } + finally + { + timer.Dispose(); + } + } + + internal async Task SweepExpiredInvitesAsync(CancellationToken cancellationToken) + { + var now = timeProvider.GetUtcNow(); + var expired = await inviteStore.ExpireAsync(now, cancellationToken).ConfigureAwait(false); + if (expired.Count == 0) + { + return; + } + + logger.LogInformation("Expired {Count} bootstrap invite(s).", expired.Count); + + foreach (var invite in expired) + { + var record = new AuthEventRecord + { + EventType = "authority.bootstrap.invite.expired", + OccurredAt = now, + CorrelationId = Guid.NewGuid().ToString("N"), + Outcome = AuthEventOutcome.Success, + Reason = "Invite expired before consumption.", + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(invite) + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); + } + } + + private static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite) + { + var properties = new List + { + new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) }, + new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) }, + new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) } + }; + + if (!string.IsNullOrWhiteSpace(invite.Provider)) + { + properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) }); + } + + if (!string.IsNullOrWhiteSpace(invite.Target)) + { + properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) }); + } + + return properties.ToArray(); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs b/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs index f5c31955..a6524532 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs @@ -6,6 +6,8 @@ internal sealed record BootstrapUserRequest { public string? Provider { get; init; } + public string? InviteToken { get; init; } + [Required] public string Username { get; init; } = string.Empty; @@ -27,6 +29,8 @@ internal sealed record BootstrapClientRequest { public string? Provider { get; init; } + public string? InviteToken { get; init; } + [Required] public string ClientId { get; init; } = string.Empty; @@ -46,3 +50,26 @@ internal sealed record BootstrapClientRequest public IReadOnlyDictionary? 
Properties { get; init; } } + +internal sealed record BootstrapInviteRequest +{ + public string Type { get; init; } = BootstrapInviteTypes.User; + + public string? Token { get; init; } + + public string? Provider { get; init; } + + public string? Target { get; init; } + + public DateTimeOffset? ExpiresAt { get; init; } + + public string? IssuedBy { get; init; } + + public IReadOnlyDictionary? Metadata { get; init; } +} + +internal static class BootstrapInviteTypes +{ + public const string User = "user"; + public const string Client = "client"; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs new file mode 100644 index 00000000..e2e01f70 --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs @@ -0,0 +1,252 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using StellaOps.Authority.RateLimiting; +using StellaOps.Cryptography.Audit; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal static class ClientCredentialsAuditHelper +{ + internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) + { + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && + value is string existing && + !string.IsNullOrWhiteSpace(existing)) + { + return existing; + } + + var correlation = Activity.Current?.TraceId.ToString() ?? + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + + transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; + return correlation; + } + + internal static AuthEventRecord CreateRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientSecret, + AuthEventOutcome outcome, + string? reason, + string? clientId, + string? providerName, + bool? confidential, + IReadOnlyList requestedScopes, + IReadOnlyList grantedScopes, + string? invalidScope, + IEnumerable? extraProperties = null, + string? eventType = null) + { + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(transaction); + + var correlationId = EnsureCorrelationId(transaction); + var client = BuildClient(clientId, providerName); + var network = BuildNetwork(metadata); + var normalizedGranted = NormalizeScopes(grantedScopes); + var properties = BuildProperties(confidential, requestedScopes, invalidScope, extraProperties); + + return new AuthEventRecord + { + EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.client_credentials.grant" : eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = outcome, + Reason = Normalize(reason), + Subject = null, + Client = client, + Scopes = normalizedGranted, + Network = network, + Properties = properties + }; + } + + internal static AuthEventRecord CreateTamperRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientId, + string? providerName, + bool? 
confidential, + IEnumerable unexpectedParameters) + { + var properties = new List + { + new() + { + Name = "request.tampered", + Value = ClassifiedString.Public("true") + } + }; + + if (confidential.HasValue) + { + properties.Add(new AuthEventProperty + { + Name = "client.confidential", + Value = ClassifiedString.Public(confidential.Value ? "true" : "false") + }); + } + + if (unexpectedParameters is not null) + { + foreach (var parameter in unexpectedParameters) + { + if (string.IsNullOrWhiteSpace(parameter)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "request.unexpected_parameter", + Value = ClassifiedString.Public(parameter) + }); + } + } + + var reason = unexpectedParameters is null + ? "Unexpected parameters supplied to client credentials request." + : $"Unexpected parameters supplied to client credentials request: {string.Join(", ", unexpectedParameters)}."; + + return CreateRecord( + timeProvider, + transaction, + metadata, + clientSecret: null, + outcome: AuthEventOutcome.Failure, + reason: reason, + clientId: clientId, + providerName: providerName, + confidential: confidential, + requestedScopes: Array.Empty(), + grantedScopes: Array.Empty(), + invalidScope: null, + extraProperties: properties, + eventType: "authority.token.tamper"); + } + + private static AuthEventClient? BuildClient(string? clientId, string? providerName) + { + if (string.IsNullOrWhiteSpace(clientId) && string.IsNullOrWhiteSpace(providerName)) + { + return null; + } + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(Normalize(clientId)), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Public(Normalize(providerName)) + }; + } + + private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) + { + var remote = Normalize(metadata?.RemoteIp); + var forwarded = Normalize(metadata?.ForwardedFor); + var userAgent = Normalize(metadata?.UserAgent); + + if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) + { + return null; + } + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remote), + ForwardedFor = ClassifiedString.Personal(forwarded), + UserAgent = ClassifiedString.Personal(userAgent) + }; + } + + private static IReadOnlyList BuildProperties( + bool? confidential, + IReadOnlyList requestedScopes, + string? invalidScope, + IEnumerable? extraProperties) + { + var properties = new List(); + + if (confidential.HasValue) + { + properties.Add(new AuthEventProperty + { + Name = "client.confidential", + Value = ClassifiedString.Public(confidential.Value ? "true" : "false") + }); + } + + var normalizedRequested = NormalizeScopes(requestedScopes); + if (normalizedRequested is { Count: > 0 }) + { + foreach (var scope in normalizedRequested) + { + if (string.IsNullOrWhiteSpace(scope)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "scope.requested", + Value = ClassifiedString.Public(scope) + }); + } + } + + if (!string.IsNullOrWhiteSpace(invalidScope)) + { + properties.Add(new AuthEventProperty + { + Name = "scope.invalid", + Value = ClassifiedString.Public(invalidScope) + }); + } + + if (extraProperties is not null) + { + foreach (var property in extraProperties) + { + if (property is null || string.IsNullOrWhiteSpace(property.Name)) + { + continue; + } + + properties.Add(property); + } + } + + return properties.Count == 0 ? 
Array.Empty() : properties; + } + + private static IReadOnlyList NormalizeScopes(IReadOnlyList? scopes) + { + if (scopes is null || scopes.Count == 0) + { + return Array.Empty(); + } + + var normalized = scopes + .Where(static scope => !string.IsNullOrWhiteSpace(scope)) + .Select(static scope => scope.Trim()) + .Where(static scope => scope.Length > 0) + .Distinct(StringComparer.Ordinal) + .OrderBy(static scope => scope, StringComparer.Ordinal) + .ToArray(); + + return normalized.Length == 0 ? Array.Empty() : normalized; + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs index b404f6e9..8ee2ac5c 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs @@ -76,6 +76,22 @@ internal sealed class ValidateClientCredentialsHandler : IOpenIddictServerHandle var requestedScopes = requestedScopeInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopeInput.ToArray(); context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes; + var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedClientCredentialsParameters(context.Request); + if (unexpectedParameters.Count > 0) + { + var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); + var tamperRecord = ClientCredentialsAuditHelper.CreateTamperRecord( + timeProvider, + context.Transaction, + metadata, + clientId, + providerHint, + confidential: null, + unexpectedParameters); + + await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false); + } + try { if (string.IsNullOrWhiteSpace(context.ClientId)) diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs index 2edeb153..85b5e5f3 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs @@ -68,6 +68,23 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler() : requestedScopesInput.ToArray(); + var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(context.Request); + if (unexpectedParameters.Count > 0) + { + var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); + var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord( + timeProvider, + context.Transaction, + metadata, + clientId, + providerHint, + context.Request.Username, + requestedScopes, + unexpectedParameters); + + await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false); + } + var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); if (!selection.Succeeded) { @@ -75,7 +92,6 @@ internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler? scopes, TimeSpan? retryAfter, AuthorityCredentialFailureCode? failureCode, - IEnumerable? extraProperties) + IEnumerable? extraProperties, + string? 
eventType = null) { ArgumentNullException.ThrowIfNull(timeProvider); ArgumentNullException.ThrowIfNull(transaction); @@ -409,7 +424,7 @@ internal static class PasswordGrantAuditHelper return new AuthEventRecord { - EventType = "authority.password.grant", + EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.password.grant" : eventType, OccurredAt = timeProvider.GetUtcNow(), CorrelationId = correlationId, Outcome = outcome, @@ -581,4 +596,61 @@ internal static class PasswordGrantAuditHelper private static string? Normalize(string? value) => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + internal static AuthEventRecord CreateTamperRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientId, + string? providerName, + string? username, + IEnumerable? scopes, + IEnumerable unexpectedParameters) + { + var properties = new List + { + new() + { + Name = "request.tampered", + Value = ClassifiedString.Public("true") + } + }; + + if (unexpectedParameters is not null) + { + foreach (var parameter in unexpectedParameters) + { + if (string.IsNullOrWhiteSpace(parameter)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "request.unexpected_parameter", + Value = ClassifiedString.Public(parameter) + }); + } + } + + var reason = unexpectedParameters is null + ? "Unexpected parameters supplied to password grant request." + : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}."; + + return CreatePasswordGrantRecord( + timeProvider, + transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName, + user: null, + username, + scopes, + retryAfter: null, + failureCode: null, + extraProperties: properties, + eventType: "authority.token.tamper"); + } } diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs index 3da96ac3..95efe539 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs @@ -111,14 +111,26 @@ internal sealed class HandleRevocationRequestHandler : IOpenIddictServerHandler< private static byte[] Base64UrlDecode(string value) { - var padded = value.Length % 4 switch + if (string.IsNullOrWhiteSpace(value)) { - 2 => value + "==", - 3 => value + "=", - _ => value - }; + return Array.Empty(); + } - padded = padded.Replace('-', '+').Replace('_', '/'); + var remainder = value.Length % 4; + if (remainder == 2) + { + value += "=="; + } + else if (remainder == 3) + { + value += "="; + } + else if (remainder != 0) + { + value += new string('=', 4 - remainder); + } + + var padded = value.Replace('-', '+').Replace('_', '/'); return Convert.FromBase64String(padded); } } diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs index 7f126ca4..ee161b56 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs @@ -119,7 +119,7 @@ internal sealed class PersistTokensHandler : IOpenIddictServerHandler logger; @@ -25,6 +32,8 @@ internal sealed class ValidateAccessTokenHandler 
: IOpenIddictServerHandler logger) @@ -32,6 +41,8 @@ internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler + { + new() { Name = "token.id", Value = ClassifiedString.Sensitive(tokenDocument.TokenId) }, + new() { Name = "token.type", Value = ClassifiedString.Public(tokenDocument.Type) }, + new() { Name = "token.devices.total", Value = ClassifiedString.Public((previousCount + 1).ToString(CultureInfo.InvariantCulture)) } + }; + + if (!string.IsNullOrWhiteSpace(tokenDocument.ClientId)) + { + properties.Add(new AuthEventProperty + { + Name = "token.client_id", + Value = ClassifiedString.Personal(tokenDocument.ClientId) + }); + } + + logger.LogWarning("Detected suspected token replay for token {TokenId} (client {ClientId}).", tokenDocument.TokenId, clientId ?? ""); + + var record = new AuthEventRecord + { + EventType = "authority.token.replay.suspected", + OccurredAt = observedAt, + CorrelationId = Activity.Current?.TraceId.ToString() ?? Guid.NewGuid().ToString("N"), + Outcome = AuthEventOutcome.Error, + Reason = "Token observed from a new device fingerprint.", + Subject = subject, + Client = client, + Scopes = Array.Empty(), + Network = network, + Properties = properties + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); + } } diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs new file mode 100644 index 00000000..d17e23ef --- /dev/null +++ b/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs @@ -0,0 +1,112 @@ +using System.Collections.Generic; +using System.Linq; +using OpenIddict.Abstractions; + +namespace StellaOps.Authority.OpenIddict; + +internal static class TokenRequestTamperInspector +{ + private static readonly HashSet CommonParameters = new(StringComparer.OrdinalIgnoreCase) + { + OpenIddictConstants.Parameters.GrantType, + OpenIddictConstants.Parameters.Scope, + OpenIddictConstants.Parameters.Resource, + OpenIddictConstants.Parameters.ClientId, + OpenIddictConstants.Parameters.ClientSecret, + OpenIddictConstants.Parameters.ClientAssertion, + OpenIddictConstants.Parameters.ClientAssertionType, + OpenIddictConstants.Parameters.RefreshToken, + OpenIddictConstants.Parameters.DeviceCode, + OpenIddictConstants.Parameters.Code, + OpenIddictConstants.Parameters.CodeVerifier, + OpenIddictConstants.Parameters.CodeChallenge, + OpenIddictConstants.Parameters.CodeChallengeMethod, + OpenIddictConstants.Parameters.RedirectUri, + OpenIddictConstants.Parameters.Assertion, + OpenIddictConstants.Parameters.Nonce, + OpenIddictConstants.Parameters.Prompt, + OpenIddictConstants.Parameters.MaxAge, + OpenIddictConstants.Parameters.UiLocales, + OpenIddictConstants.Parameters.AcrValues, + OpenIddictConstants.Parameters.LoginHint, + OpenIddictConstants.Parameters.Claims, + OpenIddictConstants.Parameters.Token, + OpenIddictConstants.Parameters.TokenTypeHint, + OpenIddictConstants.Parameters.AccessToken, + OpenIddictConstants.Parameters.IdToken + }; + + private static readonly HashSet PasswordGrantParameters = new(StringComparer.OrdinalIgnoreCase) + { + OpenIddictConstants.Parameters.Username, + OpenIddictConstants.Parameters.Password, + AuthorityOpenIddictConstants.ProviderParameterName + }; + + private static readonly HashSet ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase) + { + AuthorityOpenIddictConstants.ProviderParameterName + }; + + internal static IReadOnlyList 
GetUnexpectedPasswordGrantParameters(OpenIddictRequest request) + => DetectUnexpectedParameters(request, PasswordGrantParameters); + + internal static IReadOnlyList GetUnexpectedClientCredentialsParameters(OpenIddictRequest request) + => DetectUnexpectedParameters(request, ClientCredentialsParameters); + + private static IReadOnlyList DetectUnexpectedParameters( + OpenIddictRequest request, + HashSet grantSpecific) + { + if (request is null) + { + return Array.Empty(); + } + + var unexpected = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var pair in request.GetParameters()) + { + var name = pair.Key; + if (string.IsNullOrWhiteSpace(name)) + { + continue; + } + + if (IsAllowed(name, grantSpecific)) + { + continue; + } + + unexpected.Add(name); + } + + return unexpected.Count == 0 + ? Array.Empty() + : unexpected + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static bool IsAllowed(string parameterName, HashSet grantSpecific) + { + if (CommonParameters.Contains(parameterName) || grantSpecific.Contains(parameterName)) + { + return true; + } + + if (parameterName.StartsWith("ext_", StringComparison.OrdinalIgnoreCase) || + parameterName.StartsWith("x-", StringComparison.OrdinalIgnoreCase) || + parameterName.StartsWith("custom_", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (parameterName.Contains(':', StringComparison.Ordinal)) + { + return true; + } + + return false; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Program.cs b/src/StellaOps.Authority/StellaOps.Authority/Program.cs index 050ec056..8eb43fbd 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Program.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Program.cs @@ -24,6 +24,7 @@ using StellaOps.Authority.Plugins; using StellaOps.Authority.Bootstrap; using StellaOps.Authority.Storage.Mongo.Extensions; using StellaOps.Authority.Storage.Mongo.Initialization; +using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.RateLimiting; using StellaOps.Configuration; using StellaOps.Plugin.DependencyInjection; @@ -35,6 +36,7 @@ using StellaOps.Cryptography.DependencyInjection; using StellaOps.Authority.Revocation; using StellaOps.Authority.Signing; using StellaOps.Cryptography; +using StellaOps.Authority.Storage.Mongo.Documents; var builder = WebApplication.CreateBuilder(args); @@ -124,6 +126,7 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); builder.Services.AddSingleton(); +builder.Services.AddHostedService(); var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins( builder.Services, @@ -281,38 +284,98 @@ if (authorityOptions.Bootstrap.Enabled) HttpContext httpContext, BootstrapUserRequest request, IAuthorityIdentityProviderRegistry registry, + IAuthorityBootstrapInviteStore inviteStore, IAuthEventSink auditSink, TimeProvider timeProvider, CancellationToken cancellationToken) => { if (request is null) { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty()).ConfigureAwait(false); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); } + var now = timeProvider.GetUtcNow(); + var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? 
null : request.InviteToken.Trim(); + AuthorityBootstrapInviteDocument? invite = null; + var inviteReserved = false; + + async Task ReleaseInviteAsync(string reason) + { + if (inviteToken is null) + { + return; + } + + if (inviteReserved) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + } + + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); + } + + if (inviteToken is not null) + { + var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.User, now, request.Username, cancellationToken).ConfigureAwait(false); + + switch (reservation.Status) + { + case BootstrapInviteReservationStatus.Reserved: + inviteReserved = true; + invite = reservation.Invite; + break; + case BootstrapInviteReservationStatus.Expired: + await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." }); + case BootstrapInviteReservationStatus.AlreadyUsed: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." }); + default: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token not found.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); + } + } + var providerName = string.IsNullOrWhiteSpace(request.Provider) - ? authorityOptions.Bootstrap.DefaultIdentityProvider + ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider : request.Provider; + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && + !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite provider does not match requested provider."); + return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); + } + if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider)) { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty()).ConfigureAwait(false); + await ReleaseInviteAsync("Specified identity provider was not found."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); } if (!provider.Capabilities.SupportsPassword) { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? 
Array.Empty()).ConfigureAwait(false); + await ReleaseInviteAsync("Selected provider does not support password provisioning."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, provider.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." }); } if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password)) { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty()).ConfigureAwait(false); + await ReleaseInviteAsync("Username and password are required."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, provider.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." }); } + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && + !string.Equals(invite.Target, request.Username, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite target does not match requested username."); + return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match username." }); + } + var roles = request.Roles is null ? Array.Empty() : request.Roles.ToArray(); var attributes = request.Attributes is null ? new Dictionary(StringComparer.OrdinalIgnoreCase) @@ -327,24 +390,47 @@ if (authorityOptions.Bootstrap.Enabled) roles, attributes); - var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false); - - if (!result.Succeeded || result.Value is null) + try { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles).ConfigureAwait(false); - return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." }); + var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false); + + if (!result.Succeeded || result.Value is null) + { + await ReleaseInviteAsync(result.Message ?? "User provisioning failed."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, provider.Name, roles, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." }); + } + + if (inviteReserved && inviteToken is not null) + { + var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.SubjectId ?? 
result.Value.Username, now, cancellationToken).ConfigureAwait(false); + if (consumed) + { + await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); + } + } + + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles, inviteToken).ConfigureAwait(false); + + return Results.Ok(new + { + provider = provider.Name, + subjectId = result.Value.SubjectId, + username = result.Value.Username + }); + } + catch + { + if (inviteReserved && inviteToken is not null) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + await WriteInviteAuditAsync("authority.bootstrap.invite.released", AuthEventOutcome.Error, "Invite released due to provisioning failure.", invite, inviteToken).ConfigureAwait(false); + } + + throw; } - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, provider.Name, roles).ConfigureAwait(false); - - return Results.Ok(new - { - provider = provider.Name, - subjectId = result.Value.SubjectId, - username = result.Value.Username - }); - - async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection rolesValue) + async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection rolesValue, string? inviteValue) { var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); AuthEventNetwork? network = null; @@ -369,16 +455,24 @@ if (authorityOptions.Bootstrap.Enabled) Realm = ClassifiedString.Public(providerValue) }; - var properties = string.IsNullOrWhiteSpace(providerValue) - ? Array.Empty() - : new[] + var properties = new List(); + if (!string.IsNullOrWhiteSpace(providerValue)) + { + properties.Add(new AuthEventProperty { - new AuthEventProperty - { - Name = "bootstrap.provider", - Value = ClassifiedString.Public(providerValue) - } - }; + Name = "bootstrap.provider", + Value = ClassifiedString.Public(providerValue) + }); + } + + if (!string.IsNullOrWhiteSpace(inviteValue)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.invite_token", + Value = ClassifiedString.Public(inviteValue) + }); + } var scopes = rolesValue is { Count: > 0 } ? rolesValue.ToArray() @@ -395,65 +489,199 @@ if (authorityOptions.Bootstrap.Enabled) Client = null, Scopes = scopes, Network = network, - Properties = properties + Properties = properties.Count == 0 ? Array.Empty() : properties }; await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); } + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? 
Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(document, tokenValue) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token) + { + var properties = new List(); + if (!string.IsNullOrWhiteSpace(token)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.token", + Value = ClassifiedString.Public(token) + }); + } + + if (document is not null) + { + if (!string.IsNullOrWhiteSpace(document.Type)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.type", + Value = ClassifiedString.Public(document.Type) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Provider)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.provider", + Value = ClassifiedString.Public(document.Provider) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Target)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.target", + Value = ClassifiedString.Public(document.Target) + }); + } + + properties.Add(new AuthEventProperty + { + Name = "invite.expires_at", + Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) + }); + } + + return properties.Count == 0 ? Array.Empty() : properties.ToArray(); + } }); bootstrapGroup.MapPost("/clients", async ( HttpContext httpContext, BootstrapClientRequest request, IAuthorityIdentityProviderRegistry registry, + IAuthorityBootstrapInviteStore inviteStore, IAuthEventSink auditSink, TimeProvider timeProvider, CancellationToken cancellationToken) => { if (request is null) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null).ConfigureAwait(false); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null, null).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); } + var now = timeProvider.GetUtcNow(); + var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim(); + AuthorityBootstrapInviteDocument? invite = null; + var inviteReserved = false; + + async Task ReleaseInviteAsync(string reason) + { + if (inviteToken is null) + { + return; + } + + if (inviteReserved) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + } + + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); + } + + if (inviteToken is not null) + { + var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.Client, now, request.ClientId, cancellationToken).ConfigureAwait(false); + switch (reservation.Status) + { + case BootstrapInviteReservationStatus.Reserved: + inviteReserved = true; + invite = reservation.Invite; + break; + case BootstrapInviteReservationStatus.Expired: + await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." 
}); + case BootstrapInviteReservationStatus.AlreadyUsed: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." }); + default: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token is invalid.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); + } + } + var providerName = string.IsNullOrWhiteSpace(request.Provider) - ? authorityOptions.Bootstrap.DefaultIdentityProvider + ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider : request.Provider; + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && + !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite provider does not match requested provider."); + return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); + } + if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var provider)) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty(), request?.Confidential).ConfigureAwait(false); + await ReleaseInviteAsync("Specified identity provider was not found."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty(), request?.Confidential, inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); } if (!provider.Capabilities.SupportsClientProvisioning || provider.ClientProvisioning is null) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); + await ReleaseInviteAsync("Selected provider does not support client provisioning."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." }); } if (string.IsNullOrWhiteSpace(request.ClientId)) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); + await ReleaseInviteAsync("ClientId is required."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." 
}); } + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && + !string.Equals(invite.Target, request.ClientId, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite target does not match requested client id."); + return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match client id." }); + } + if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret)) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); + await ReleaseInviteAsync("Confidential clients require a client secret."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." }); } if (!TryParseUris(request.RedirectUris, out var redirectUris, out var redirectError)) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, redirectError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = redirectError }); + var errorMessage = redirectError ?? "Redirect URI validation failed."; + await ReleaseInviteAsync(errorMessage); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); } if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError)) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, postLogoutError, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = postLogoutError }); + var errorMessage = postLogoutError ?? "Post-logout redirect URI validation failed."; + await ReleaseInviteAsync(errorMessage); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); } var properties = request.Properties is null @@ -475,11 +703,21 @@ if (authorityOptions.Bootstrap.Enabled) if (!result.Succeeded || result.Value is null) { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); + await ReleaseInviteAsync(result.Message ?? "Client provisioning failed."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, provider.Name, request.AllowedScopes ?? 
Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." }); } - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential).ConfigureAwait(false); + if (inviteReserved && inviteToken is not null) + { + var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.ClientId, now, cancellationToken).ConfigureAwait(false); + if (consumed) + { + await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); + } + } + + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, provider.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); return Results.Ok(new { @@ -488,7 +726,7 @@ if (authorityOptions.Bootstrap.Enabled) confidential = result.Value.Confidential }); - async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection scopes, bool? confidentialFlag) + async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection scopes, bool? confidentialFlag, string? inviteValue) { var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); AuthEventNetwork? network = null; @@ -533,6 +771,15 @@ if (authorityOptions.Bootstrap.Enabled) }); } + if (!string.IsNullOrWhiteSpace(inviteValue)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.invite_token", + Value = ClassifiedString.Public(inviteValue) + }); + } + var record = new AuthEventRecord { EventType = "authority.bootstrap.client", @@ -549,6 +796,175 @@ if (authorityOptions.Bootstrap.Enabled) await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); } + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(document, tokenValue) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? 
token) + { + var properties = new List(); + if (!string.IsNullOrWhiteSpace(token)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.token", + Value = ClassifiedString.Public(token) + }); + } + + if (document is not null) + { + if (!string.IsNullOrWhiteSpace(document.Type)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.type", + Value = ClassifiedString.Public(document.Type) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Provider)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.provider", + Value = ClassifiedString.Public(document.Provider) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Target)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.target", + Value = ClassifiedString.Public(document.Target) + }); + } + + properties.Add(new AuthEventProperty + { + Name = "invite.expires_at", + Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) + }); + } + + return properties.Count == 0 ? Array.Empty() : properties.ToArray(); + } + }); + bootstrapGroup.MapPost("/invites", async ( + HttpContext httpContext, + BootstrapInviteRequest request, + IAuthorityBootstrapInviteStore inviteStore, + IAuthEventSink auditSink, + TimeProvider timeProvider, + CancellationToken cancellationToken) => + { + if (request is null) + { + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + if (string.IsNullOrWhiteSpace(request.Type) || + ( !string.Equals(request.Type, BootstrapInviteTypes.User, StringComparison.OrdinalIgnoreCase) && + !string.Equals(request.Type, BootstrapInviteTypes.Client, StringComparison.OrdinalIgnoreCase))) + { + return Results.BadRequest(new { error = "invalid_request", message = "Invite type must be 'user' or 'client'." }); + } + + var now = timeProvider.GetUtcNow(); + var expiresAt = request.ExpiresAt ?? now.AddDays(2); + if (expiresAt <= now) + { + return Results.BadRequest(new { error = "invalid_request", message = "ExpiresAt must be in the future." }); + } + + var token = string.IsNullOrWhiteSpace(request.Token) ? Guid.NewGuid().ToString("N") : request.Token.Trim(); + + var document = new AuthorityBootstrapInviteDocument + { + Token = token, + Type = request.Type.ToLowerInvariant(), + Provider = string.IsNullOrWhiteSpace(request.Provider) ? null : request.Provider.Trim(), + Target = string.IsNullOrWhiteSpace(request.Target) ? null : request.Target.Trim(), + IssuedAt = now, + IssuedBy = string.IsNullOrWhiteSpace(request.IssuedBy) ? httpContext.User?.Identity?.Name : request.IssuedBy, + ExpiresAt = expiresAt, + Metadata = request.Metadata is null ? null : new Dictionary(request.Metadata, StringComparer.OrdinalIgnoreCase) + }; + + await inviteStore.CreateAsync(document, cancellationToken).ConfigureAwait(false); + await WriteInviteAuditAsync("authority.bootstrap.invite.created", AuthEventOutcome.Success, null, document).ConfigureAwait(false); + + return Results.Ok(new + { + document.Token, + document.Type, + document.Provider, + document.Target, + document.ExpiresAt + }); + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument invite) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? 
Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(invite) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite) + { + var properties = new List + { + new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) }, + new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) }, + new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) } + }; + + if (!string.IsNullOrWhiteSpace(invite.Provider)) + { + properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) }); + } + + if (!string.IsNullOrWhiteSpace(invite.Target)) + { + properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) }); + } + + if (!string.IsNullOrWhiteSpace(invite.IssuedBy)) + { + properties.Add(new AuthEventProperty { Name = "invite.issued_by", Value = ClassifiedString.Public(invite.IssuedBy) }); + } + + return properties.ToArray(); + } }); bootstrapGroup.MapGet("/revocations/export", async ( @@ -573,6 +989,7 @@ if (authorityOptions.Bootstrap.Enabled) { Algorithm = package.Signature.Algorithm, KeyId = package.Signature.KeyId, + Provider = package.Signature.Provider, Value = package.Signature.Value }, Digest = new RevocationExportDigest diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs b/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs index 62dee1ce..6b0dc59d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs @@ -41,6 +41,11 @@ internal sealed class AuthorityRateLimiterMetadata /// public IReadOnlyDictionary Tags => tags; + /// + /// User agent string associated with the request, if captured. + /// + public string? UserAgent { get; set; } + /// /// Adds or updates an arbitrary metadata tag for downstream consumers. /// diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs b/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs index 921c147e..da1b8fc1 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs @@ -61,6 +61,9 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware metadata.ClientId = ResolveAuthorizeClientId(context.Request.Query); } + var userAgent = NormalizeUserAgent(context.Request.Headers.UserAgent.ToString()); + metadata.UserAgent = userAgent; + if (!string.IsNullOrWhiteSpace(metadata.ClientId)) { metadata.SetTag("authority.client_id", metadata.ClientId); @@ -74,6 +77,10 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware metadata.SetTag("authority.endpoint", metadata.Endpoint ?? string.Empty); metadata.SetTag("authority.remote_ip", metadata.RemoteIp ?? 
"unknown"); metadata.SetTag("authority.captured_at", clock.GetUtcNow().ToString("O", CultureInfo.InvariantCulture)); + if (!string.IsNullOrWhiteSpace(userAgent)) + { + metadata.SetTag("authority.user_agent", userAgent); + } await next(context).ConfigureAwait(false); } @@ -145,6 +152,17 @@ internal sealed class AuthorityRateLimiterMetadataMiddleware return null; } + private static string? NormalizeUserAgent(string? userAgent) + { + if (string.IsNullOrWhiteSpace(userAgent)) + { + return null; + } + + var trimmed = userAgent.Trim(); + return trimmed.Length == 0 ? null : trimmed; + } + private async Task ResolveTokenClientIdAsync(HttpContext context) { var request = context.Request; diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs index 1f2f519e..403a1bbf 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs @@ -1,3 +1,3 @@ namespace StellaOps.Authority.Revocation; -internal sealed record RevocationBundleSignature(string Algorithm, string KeyId, string Value); +internal sealed record RevocationBundleSignature(string Algorithm, string KeyId, string Provider, string Value); diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs index 330593d8..12a0de7a 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs @@ -51,12 +51,18 @@ internal sealed class RevocationBundleSigner : signing.Algorithm.Trim(); var keyReference = new CryptoKeyReference(signing.ActiveKeyId, signing.Provider); - var signer = providerRegistry.ResolveSigner(CryptoCapability.Signing, algorithm, keyReference, signing.Provider); + var resolved = providerRegistry.ResolveSigner( + CryptoCapability.Signing, + algorithm, + keyReference, + signing.Provider); + var signer = resolved.Signer; var header = new Dictionary { ["alg"] = algorithm, ["kid"] = signing.ActiveKeyId, + ["provider"] = resolved.ProviderName, ["typ"] = "application/vnd.stellaops.revocation-bundle+jws", ["b64"] = false, ["crit"] = new[] { "b64" } @@ -77,7 +83,11 @@ internal sealed class RevocationBundleSigner var signingInput = new ReadOnlyMemory(buffer, 0, signingInputLength); var signatureBytes = await signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false); var encodedSignature = Base64UrlEncode(signatureBytes); - return new RevocationBundleSignature(algorithm, signing.ActiveKeyId, string.Concat(protectedHeader, "..", encodedSignature)); + return new RevocationBundleSignature( + algorithm, + signing.ActiveKeyId, + resolved.ProviderName, + string.Concat(protectedHeader, "..", encodedSignature)); } finally { diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs index 346773b4..6ea1cf44 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs @@ -56,6 +56,9 @@ internal sealed class RevocationExportSignature [JsonPropertyName("keyId")] public required string KeyId { get; init; } + 
[JsonPropertyName("provider")] + public required string Provider { get; init; } + [JsonPropertyName("value")] public required string Value { get; init; } } diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs b/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs index a4a4ce34..600cd8e0 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs +++ b/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs @@ -236,10 +236,11 @@ internal sealed class AuthoritySigningKeyManager ["status"] = AuthoritySigningKeyStatus.Retired }; + var privateParameters = previous.Key.PrivateParameters; var retiredKey = new CryptoSigningKey( previous.Key.Reference, previous.Key.AlgorithmId, - in previous.Key.PrivateParameters, + in privateParameters, previous.Key.CreatedAt, previous.Key.ExpiresAt, metadata); diff --git a/src/StellaOps.Authority/TASKS.md b/src/StellaOps.Authority/TASKS.md index b49721e4..dc3bf375 100644 --- a/src/StellaOps.Authority/TASKS.md +++ b/src/StellaOps.Authority/TASKS.md @@ -6,11 +6,18 @@ | CORE9.REVOCATION | DONE (2025-10-12) | Authority Core, Security Guild | CORE5 | Implement revocation list persistence + export hooks (API + CLI). | ✅ Revoked tokens denied; ✅ Export endpoint/CLI returns manifest; ✅ Tests cover offline bundle flow. | | CORE10.JWKS | DONE (2025-10-12) | Authority Core, DevOps | CORE9.REVOCATION | Provide JWKS rotation with pluggable key loader + documentation. | ✅ Signing/encryption keys rotate without downtime; ✅ JWKS endpoint updates; ✅ Docs describe rotation SOP. | | CORE8.RL | DONE (2025-10-12) | Authority Core | CORE8 | Deliver ASP.NET rate limiter plumbing (request metadata, dependency injection hooks) needed by Security Guild. | ✅ `/token` & `/authorize` pipelines expose limiter hooks; ✅ Tests cover throttle behaviour baseline. | -| SEC2.HOST | TODO | Security Guild, Authority Core | SEC2.A (audit contract) | Hook audit logger into OpenIddict handlers and bootstrap endpoints. | ✅ Audit events populated with correlationId, IP, client_id; ✅ Mongo login attempts persisted; ✅ Tests verify on success/failure/lockout. | +| SEC2.HOST | DONE (2025-10-12) | Security Guild, Authority Core | SEC2.A (audit contract) | Hook audit logger into OpenIddict handlers and bootstrap endpoints. | ✅ Audit events populated with correlationId, IP, client_id; ✅ Mongo login attempts persisted; ✅ Tests verify on success/failure/lockout. | | SEC3.HOST | DONE (2025-10-11) | Security Guild | CORE8.RL, SEC3.A (rate policy) | Apply rate limiter policies (`AddRateLimiter`) to `/token` and `/internal/*` endpoints with configuration binding. | ✅ Policies configurable via `StellaOpsAuthorityOptions.Security.RateLimiting`; ✅ Integration tests hit 429 after limit; ✅ Docs updated. | | SEC4.HOST | DONE (2025-10-12) | Security Guild, DevOps | SEC4.A (revocation schema) | Implement CLI/HTTP surface to export revocation bundle + detached JWS using `StellaOps.Cryptography`. | ✅ `stellaops auth revoke export` CLI/endpoint returns JSON + `.jws`; ✅ Verification script passes; ✅ Operator docs updated. | | SEC4.KEY | DONE (2025-10-12) | Security Guild, DevOps | SEC4.HOST | Integrate signing keys with provider registry (initial ES256). | ✅ Keys loaded via `ICryptoProvider` signer; ✅ Rotation SOP documented. | -| SEC5.HOST | TODO | Security Guild | SEC5.A (threat model) | Feed Authority-specific mitigations (rate limiting, audit, revocation) into threat model + backlog. 
| ✅ Threat model updated; ✅ Backlog issues reference mitigations; ✅ Review sign-off captured. |
+| SEC5.HOST | DONE (2025-10-14) | Security Guild | SEC5.A (threat model) | Feed Authority-specific mitigations (rate limiting, audit, revocation) into threat model + backlog. | ✅ Threat model updated; ✅ Backlog issues reference mitigations; ✅ Review sign-off captured. |
+| SEC5.HOST-INVITES | DONE (2025-10-14) | Security Guild, Authority Core | SEC5.D | Implement bootstrap invite persistence, APIs, and background cleanup with audit coverage. | ✅ Invite store + endpoints complete; ✅ Cleanup service expires unused invites; ✅ Audit events for create/consume/expire; ✅ Build/tests green. |
+> Remark (2025-10-14): Background sweep emits invite expiry audits; integration test added.
+| SEC5.HOST-REPLAY | DONE (2025-10-14) | Security Guild, Zastava | SEC5.E | Persist token usage metadata and surface suspected replay heuristics. | ✅ Validation handlers record device metadata; ✅ Suspected replay flagged via audit/logs; ✅ Tests cover regression cases. |
+> Remark (2025-10-14): Token validation handler logs suspected replay audits with device metadata; coverage via unit/integration tests.
 | SEC3.BUILD | DONE (2025-10-11) | Authority Core, Security Guild | SEC3.HOST, FEEDMERGE-COORD-02-900 | Track normalized-range dependency fallout and restore full test matrix once Feedser range primitives land. | ✅ Feedser normalized range libraries merged; ✅ Authority + Configuration test suites (`dotnet test src/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) pass without Feedser compile failures; ✅ Status recorded here/Sprints (authority-core broadcast not available). |
+| AUTHCORE-BUILD-OPENIDDICT | DONE (2025-10-14) | Authority Core | SEC2.HOST | Adapt host/audit handlers for OpenIddict 6.4 API surface (no `OpenIddictServerTransaction`) and restore Authority solution build. | ✅ Build `dotnet build src/StellaOps.Authority.sln` succeeds; ✅ Audit correlation + tamper logging verified under new abstractions; ✅ Tests updated. |
+| AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. |
+| AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. |
 
 > Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic.
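Reviewer note on SEC5.HOST-INVITES / AUTHCORE-BOOTSTRAP-INVITES: the bootstrap endpoints in the Program.cs hunk follow a reserve, provision, then consume-or-release pattern against the invite store. The sketch below restates that flow in isolation. It reuses the store calls that appear in the diff (`TryReserveAsync`, `MarkConsumedAsync`, `ReleaseAsync`), but the trimmed-down `InviteReservation` record, the `IAuthorityBootstrapInviteStore` shape shown here, and the `ProvisionWithInviteAsync` helper are simplifications introduced purely for illustration, not the production types.

using System;
using System.Threading;
using System.Threading.Tasks;

// Simplified stand-ins for the store surface exercised in the Program.cs hunk above.
// The real documents and result types live in StellaOps.Authority.Storage.Mongo;
// these exist only so the sketch is self-contained.
enum BootstrapInviteReservationStatus { Reserved, Expired, AlreadyUsed, NotFound }

sealed record InviteReservation(BootstrapInviteReservationStatus Status);

interface IAuthorityBootstrapInviteStore
{
    Task<InviteReservation> TryReserveAsync(string token, string type, DateTimeOffset now, string? target, CancellationToken ct);
    Task<bool> MarkConsumedAsync(string token, string? subject, DateTimeOffset now, CancellationToken ct);
    Task ReleaseAsync(string token, CancellationToken ct);
}

static class InviteLifecycle
{
    // Reserve the invite up front, run provisioning, then either consume the invite
    // on success or release it so the token can be retried after a failure.
    public static async Task<bool> ProvisionWithInviteAsync(
        IAuthorityBootstrapInviteStore store,
        string token,
        string inviteType,
        string? target,
        Func<CancellationToken, Task<string?>> provisionAsync, // returns subject id on success, null on failure
        TimeProvider timeProvider,
        CancellationToken ct)
    {
        var now = timeProvider.GetUtcNow();
        var reservation = await store.TryReserveAsync(token, inviteType, now, target, ct).ConfigureAwait(false);
        if (reservation.Status != BootstrapInviteReservationStatus.Reserved)
        {
            return false; // expired, already consumed, or unknown token
        }

        try
        {
            var subject = await provisionAsync(ct).ConfigureAwait(false);
            if (subject is null)
            {
                await store.ReleaseAsync(token, ct).ConfigureAwait(false);
                return false;
            }

            await store.MarkConsumedAsync(token, subject, now, ct).ConfigureAwait(false);
            return true;
        }
        catch
        {
            // Mirror the endpoint behaviour: never leave a reserved invite dangling.
            await store.ReleaseAsync(token, ct).ConfigureAwait(false);
            throw;
        }
    }
}

Releasing on every validation failure and on exceptions keeps a rejected or crashed request from permanently burning the invite token, which is the behaviour the endpoint code above enforces via its `ReleaseInviteAsync` local function and the try/catch around provisioning.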
diff --git a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs b/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs index c7bfdce4..31545959 100644 --- a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs +++ b/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs @@ -1,8 +1,10 @@ -using System; +using System; using System.Collections.Generic; using System.IO; +using System.Security.Cryptography; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.DependencyInjection; @@ -16,6 +18,7 @@ using StellaOps.Cli.Services; using StellaOps.Cli.Services.Models; using StellaOps.Cli.Telemetry; using StellaOps.Cli.Tests.Testing; +using StellaOps.Cryptography; namespace StellaOps.Cli.Tests.Commands; @@ -208,6 +211,34 @@ public sealed class CommandHandlersTests } } + [Theory] + [InlineData(null)] + [InlineData("default")] + [InlineData("libsodium")] + public async Task HandleAuthRevokeVerifyAsync_VerifiesBundlesUsingProviderRegistry(string? providerHint) + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var artifacts = await WriteRevocationArtifactsAsync(tempDir, providerHint); + + await CommandHandlers.HandleAuthRevokeVerifyAsync( + artifacts.BundlePath, + artifacts.SignaturePath, + artifacts.KeyPath, + verbose: true, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + [Fact] public async Task HandleAuthStatusAsync_ReportsCachedToken() { @@ -360,6 +391,79 @@ public sealed class CommandHandlersTests } } + private static async Task WriteRevocationArtifactsAsync(TempDirectory temp, string? providerHint) + { + var (bundleBytes, signature, keyPem) = await BuildRevocationArtifactsAsync(providerHint); + + var bundlePath = Path.Combine(temp.Path, "revocation-bundle.json"); + var signaturePath = Path.Combine(temp.Path, "revocation-bundle.json.jws"); + var keyPath = Path.Combine(temp.Path, "revocation-key.pem"); + + await File.WriteAllBytesAsync(bundlePath, bundleBytes); + await File.WriteAllTextAsync(signaturePath, signature); + await File.WriteAllTextAsync(keyPath, keyPem); + + return new RevocationArtifactPaths(bundlePath, signaturePath, keyPath); + } + + private static async Task<(byte[] Bundle, string Signature, string KeyPem)> BuildRevocationArtifactsAsync(string? 
providerHint) + { + var bundleBytes = Encoding.UTF8.GetBytes("{\"revocations\":[]}"); + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var parameters = ecdsa.ExportParameters(includePrivateParameters: true); + + var signingKey = new CryptoSigningKey( + new CryptoKeyReference("revocation-test"), + SignatureAlgorithms.Es256, + privateParameters: in parameters, + createdAt: DateTimeOffset.UtcNow); + + var provider = new DefaultCryptoProvider(); + provider.UpsertSigningKey(signingKey); + var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference); + + var header = new Dictionary + { + ["alg"] = SignatureAlgorithms.Es256, + ["kid"] = signingKey.Reference.KeyId, + ["typ"] = "application/vnd.stellaops.revocation-bundle+jws", + ["b64"] = false, + ["crit"] = new[] { "b64" } + }; + + if (!string.IsNullOrWhiteSpace(providerHint)) + { + header["provider"] = providerHint; + } + + var serializerOptions = new JsonSerializerOptions + { + PropertyNamingPolicy = null, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + var headerJson = JsonSerializer.Serialize(header, serializerOptions); + var encodedHeader = Base64UrlEncoder.Encode(Encoding.UTF8.GetBytes(headerJson)); + + var signingInput = new byte[encodedHeader.Length + 1 + bundleBytes.Length]; + var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); + Buffer.BlockCopy(headerBytes, 0, signingInput, 0, headerBytes.Length); + signingInput[headerBytes.Length] = (byte)'.'; + Buffer.BlockCopy(bundleBytes, 0, signingInput, headerBytes.Length + 1, bundleBytes.Length); + + var signatureBytes = await signer.SignAsync(signingInput); + var encodedSignature = Base64UrlEncoder.Encode(signatureBytes); + var jws = string.Concat(encodedHeader, "..", encodedSignature); + + var publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo(); + var keyPem = new string(PemEncoding.Write("PUBLIC KEY", publicKeyBytes)); + + return (bundleBytes, jws, keyPem); + } + + private sealed record RevocationArtifactPaths(string BundlePath, string SignaturePath, string KeyPath); + private static IServiceProvider BuildServiceProvider( IBackendOperationsClient backend, IScannerExecutor? executor = null, diff --git a/src/StellaOps.Cli/Commands/CommandHandlers.cs b/src/StellaOps.Cli/Commands/CommandHandlers.cs index 4f599717..3fcb77a8 100644 --- a/src/StellaOps.Cli/Commands/CommandHandlers.cs +++ b/src/StellaOps.Cli/Commands/CommandHandlers.cs @@ -641,11 +641,12 @@ internal static class CommandHandlers } logger.LogInformation( - "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}).", + "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).", directory, result.Sequence, result.IssuedAt, - string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId); + string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId, + string.IsNullOrWhiteSpace(result.SigningProvider) ? "default" : result.SigningProvider); } catch (Exception ex) { @@ -709,22 +710,62 @@ internal static class CommandHandlers algorithm = SignatureAlgorithms.Es256; } - var hashAlgorithm = ResolveHashAlgorithm(algorithm); - if (hashAlgorithm is null) + var providerHint = header.TryGetProperty("provider", out var providerElement) + ? providerElement.GetString() + : null; + + var keyId = header.TryGetProperty("kid", out var kidElement) ? 
kidElement.GetString() : null; + if (string.IsNullOrWhiteSpace(keyId)) { - logger.LogError("Unsupported signing algorithm '{Algorithm}'.", algorithm); + keyId = Path.GetFileNameWithoutExtension(keyPath); + logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId); + } + + CryptoSigningKey signingKey; + try + { + signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath); + } + catch (Exception ex) when (ex is InvalidOperationException or CryptographicException) + { + logger.LogError(ex, "Failed to load verification key material."); Environment.ExitCode = 1; return; } - using var ecdsa = ECDsa.Create(); + var providers = new List + { + new DefaultCryptoProvider() + }; + +#if STELLAOPS_CRYPTO_SODIUM + providers.Add(new LibsodiumCryptoProvider()); +#endif + + foreach (var provider in providers) + { + if (provider.Supports(CryptoCapability.Verification, algorithm!)) + { + provider.UpsertSigningKey(signingKey); + } + } + + var preferredOrder = !string.IsNullOrWhiteSpace(providerHint) + ? new[] { providerHint! } + : Array.Empty(); + var registry = new CryptoProviderRegistry(providers, preferredOrder); + CryptoSignerResolution resolution; try { - ecdsa.ImportFromPem(keyPem); + resolution = registry.ResolveSigner( + CryptoCapability.Verification, + algorithm!, + signingKey.Reference, + providerHint); } - catch (CryptographicException ex) + catch (Exception ex) { - logger.LogError(ex, "Failed to import signing key."); + logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm); Environment.ExitCode = 1; return; } @@ -739,7 +780,10 @@ internal static class CommandHandlers Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length); var signatureBytes = Base64UrlDecode(encodedSignature); - var verified = ecdsa.VerifyData(new ReadOnlySpan(buffer, 0, signingInputLength), signatureBytes, hashAlgorithm.Value); + var verified = await resolution.Signer.VerifyAsync( + new ReadOnlyMemory(buffer, 0, signingInputLength), + signatureBytes, + cancellationToken).ConfigureAwait(false); if (!verified) { @@ -753,7 +797,19 @@ internal static class CommandHandlers ArrayPool.Shared.Return(buffer); } - logger.LogInformation("Signature verified using algorithm {Algorithm}.", algorithm); + if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase)) + { + logger.LogWarning( + "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.", + providerHint, + resolution.ProviderName); + } + + logger.LogInformation( + "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).", + algorithm, + resolution.ProviderName, + signingKey.Reference.KeyId); if (verbose) { @@ -812,24 +868,39 @@ internal static class CommandHandlers return Convert.FromBase64String(normalized); } - private static HashAlgorithmName? ResolveHashAlgorithm(string algorithm) + private static CryptoSigningKey CreateVerificationSigningKey( + string keyId, + string algorithm, + string? 
providerHint, + string keyPem, + string keyPath) { - if (string.Equals(algorithm, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase)) + if (string.IsNullOrWhiteSpace(keyPem)) { - return HashAlgorithmName.SHA256; + throw new InvalidOperationException("Verification key PEM content is empty."); } - if (string.Equals(algorithm, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase)) + using var ecdsa = ECDsa.Create(); + ecdsa.ImportFromPem(keyPem); + + var parameters = ecdsa.ExportParameters(includePrivateParameters: false); + if (parameters.D is null || parameters.D.Length == 0) { - return HashAlgorithmName.SHA384; + parameters.D = new byte[] { 0x01 }; } - if (string.Equals(algorithm, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase)) + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) { - return HashAlgorithmName.SHA512; - } + ["source"] = Path.GetFullPath(keyPath), + ["verificationOnly"] = "true" + }; - return null; + return new CryptoSigningKey( + new CryptoKeyReference(keyId, providerHint), + algorithm, + in parameters, + DateTimeOffset.UtcNow, + metadata: metadata); } private static string FormatDuration(TimeSpan duration) diff --git a/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs b/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs index ec1df1d5..26afad5a 100644 --- a/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs +++ b/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs @@ -78,7 +78,12 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient if (verbose) { - logger.LogInformation("Received revocation export sequence {Sequence} (sha256:{Digest}, signing key {KeyId}).", payload.Sequence, digest, payload.SigningKeyId ?? ""); + logger.LogInformation( + "Received revocation export sequence {Sequence} (sha256:{Digest}, signing key {KeyId}, provider {Provider}).", + payload.Sequence, + digest, + payload.SigningKeyId ?? "", + string.IsNullOrWhiteSpace(payload.Signature?.Provider) ? "default" : payload.Signature!.Provider); } return new AuthorityRevocationExportResult @@ -88,7 +93,8 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient Digest = digest, Sequence = payload.Sequence, IssuedAt = payload.IssuedAt, - SigningKeyId = payload.SigningKeyId + SigningKeyId = payload.SigningKeyId, + SigningProvider = payload.Signature?.Provider }; } @@ -201,6 +207,9 @@ internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient [JsonPropertyName("keyId")] public string KeyId { get; set; } = string.Empty; + [JsonPropertyName("provider")] + public string Provider { get; set; } = string.Empty; + [JsonPropertyName("value")] public string Value { get; set; } = string.Empty; } diff --git a/src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs b/src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs index 74e55c94..31c1a302 100644 --- a/src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs +++ b/src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs @@ -15,4 +15,6 @@ internal sealed class AuthorityRevocationExportResult public required DateTimeOffset IssuedAt { get; init; } public string? SigningKeyId { get; init; } + + public string? 
SigningProvider { get; init; } } diff --git a/src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs b/src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs index 76bc24cd..d3ae2709 100644 --- a/src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs +++ b/src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Linq; using StellaOps.Authority.Plugins.Abstractions; using StellaOps.Configuration; using Xunit; @@ -97,6 +98,61 @@ public class AuthorityPluginConfigurationLoaderTests : IDisposable Assert.Contains("unknown capability", ex.Message, StringComparison.OrdinalIgnoreCase); } + [Fact] + public void Analyze_ReturnsWarning_WhenStandardPasswordPolicyWeaker() + { + var pluginDir = Path.Combine(tempRoot, "etc", "authority.plugins"); + Directory.CreateDirectory(pluginDir); + + var standardConfigPath = Path.Combine(pluginDir, "standard.yaml"); + File.WriteAllText(standardConfigPath, "passwordPolicy:\n minimumLength: 8\n requireSymbol: false\n"); + + var options = CreateOptions(); + options.Plugins.ConfigurationDirectory = "etc/authority.plugins"; + options.Plugins.Descriptors["standard"] = new AuthorityPluginDescriptorOptions + { + AssemblyName = "StellaOps.Authority.Plugin.Standard", + Enabled = true + }; + + options.Validate(); + + var contexts = AuthorityPluginConfigurationLoader.Load(options, tempRoot); + var diagnostics = AuthorityPluginConfigurationAnalyzer.Analyze(contexts); + + var diagnostic = Assert.Single(diagnostics); + Assert.Equal(AuthorityConfigurationDiagnosticSeverity.Warning, diagnostic.Severity); + Assert.Equal("standard", diagnostic.PluginName); + Assert.Contains("minimum length 8", diagnostic.Message, StringComparison.OrdinalIgnoreCase); + Assert.Contains("symbol requirement disabled", diagnostic.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void Analyze_ReturnsNoDiagnostics_WhenPasswordPolicyMatchesBaseline() + { + var pluginDir = Path.Combine(tempRoot, "etc", "authority.plugins"); + Directory.CreateDirectory(pluginDir); + + var standardConfigPath = Path.Combine(pluginDir, "standard.yaml"); + // Baseline configuration (no overrides) + File.WriteAllText(standardConfigPath, "bootstrapUser:\n username: bootstrap\n password: Bootstrap1!\n"); + + var options = CreateOptions(); + options.Plugins.ConfigurationDirectory = "etc/authority.plugins"; + options.Plugins.Descriptors["standard"] = new AuthorityPluginDescriptorOptions + { + AssemblyName = "StellaOps.Authority.Plugin.Standard", + Enabled = true + }; + + options.Validate(); + + var contexts = AuthorityPluginConfigurationLoader.Load(options, tempRoot); + var diagnostics = AuthorityPluginConfigurationAnalyzer.Analyze(contexts); + + Assert.Empty(diagnostics); + } + public void Dispose() { try @@ -121,6 +177,8 @@ public class AuthorityPluginConfigurationLoaderTests : IDisposable }; options.Storage.ConnectionString = "mongodb://localhost:27017/authority_test"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/authority-test-key.pem"; return options; } } diff --git a/src/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs b/src/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs new file mode 100644 index 00000000..01fdca2b --- /dev/null +++ b/src/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs @@ -0,0 +1,28 @@ +using System; + +namespace StellaOps.Configuration; + +/// +/// Represents a configuration 
diagnostic emitted while analysing Authority plugin settings. +/// +public sealed record AuthorityConfigurationDiagnostic( + string PluginName, + AuthorityConfigurationDiagnosticSeverity Severity, + string Message) +{ + public string PluginName { get; init; } = PluginName ?? throw new ArgumentNullException(nameof(PluginName)); + + public AuthorityConfigurationDiagnosticSeverity Severity { get; init; } = Severity; + + public string Message { get; init; } = Message ?? throw new ArgumentNullException(nameof(Message)); +} + +/// +/// Severity levels for configuration diagnostics. +/// +public enum AuthorityConfigurationDiagnosticSeverity +{ + Info = 0, + Warning = 1, + Error = 2 +} diff --git a/src/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs b/src/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs new file mode 100644 index 00000000..e16da615 --- /dev/null +++ b/src/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using Microsoft.Extensions.Configuration; +using StellaOps.Authority.Plugins.Abstractions; + +namespace StellaOps.Configuration; + +/// +/// Analyses Authority plugin configurations for common security issues. +/// +public static class AuthorityPluginConfigurationAnalyzer +{ + private const int BaselineMinimumLength = 12; + private const bool BaselineRequireUppercase = true; + private const bool BaselineRequireLowercase = true; + private const bool BaselineRequireDigit = true; + private const bool BaselineRequireSymbol = true; + + /// + /// Evaluates plugin contexts and returns diagnostics describing potential misconfigurations. + /// + /// Plugin contexts produced by . + /// Diagnostics describing any detected issues. 
+ public static IReadOnlyList Analyze(IEnumerable contexts) + { + ArgumentNullException.ThrowIfNull(contexts); + + var diagnostics = new List(); + + foreach (var context in contexts) + { + if (context is null) + { + continue; + } + + if (string.Equals(context.Manifest.AssemblyName, "StellaOps.Authority.Plugin.Standard", StringComparison.OrdinalIgnoreCase)) + { + AnalyzeStandardPlugin(context, diagnostics); + } + } + + return diagnostics; + } + + private static void AnalyzeStandardPlugin(AuthorityPluginContext context, ICollection diagnostics) + { + var section = context.Configuration.GetSection("passwordPolicy"); + if (!section.Exists()) + { + return; + } + + int minLength = section.GetValue("minimumLength", BaselineMinimumLength); + bool requireUppercase = section.GetValue("requireUppercase", BaselineRequireUppercase); + bool requireLowercase = section.GetValue("requireLowercase", BaselineRequireLowercase); + bool requireDigit = section.GetValue("requireDigit", BaselineRequireDigit); + bool requireSymbol = section.GetValue("requireSymbol", BaselineRequireSymbol); + + var deviations = new List(); + + if (minLength < BaselineMinimumLength) + { + deviations.Add($"minimum length {minLength.ToString(CultureInfo.InvariantCulture)} < {BaselineMinimumLength}"); + } + + if (!requireUppercase && BaselineRequireUppercase) + { + deviations.Add("uppercase requirement disabled"); + } + + if (!requireLowercase && BaselineRequireLowercase) + { + deviations.Add("lowercase requirement disabled"); + } + + if (!requireDigit && BaselineRequireDigit) + { + deviations.Add("digit requirement disabled"); + } + + if (!requireSymbol && BaselineRequireSymbol) + { + deviations.Add("symbol requirement disabled"); + } + + if (deviations.Count == 0) + { + return; + } + + var message = $"Password policy for plugin '{context.Manifest.Name}' weakens host defaults: {string.Join(", ", deviations)}."; + diagnostics.Add(new AuthorityConfigurationDiagnostic(context.Manifest.Name, AuthorityConfigurationDiagnosticSeverity.Warning, message)); + } +} diff --git a/src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs b/src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs index 4ac7c033..58851f33 100644 --- a/src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs +++ b/src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs @@ -31,14 +31,19 @@ public static class CryptoServiceCollectionExtensions services.Configure(configureRegistry); } - services.TryAddSingleton(sp => + services.TryAddSingleton(sp => { var provider = new DefaultCryptoProvider(); configureProvider?.Invoke(provider); return provider; }); - services.TryAddEnumerable(ServiceDescriptor.Singleton(sp => sp.GetRequiredService())); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + +#if STELLAOPS_CRYPTO_SODIUM + services.TryAddSingleton(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); +#endif services.TryAddSingleton(sp => { diff --git a/src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs b/src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs index 484e5689..daef167c 100644 --- a/src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs +++ b/src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs @@ -41,20 +41,22 @@ public class CryptoProviderRegistryTests var registry = new CryptoProviderRegistry(new[] { providerA, providerB }, Array.Empty()); - var hintSigner = registry.ResolveSigner( + var 
hintResolution = registry.ResolveSigner( CryptoCapability.Signing, SignatureAlgorithms.Es256, new CryptoKeyReference("key-b"), preferredProvider: "providerB"); - Assert.Equal("key-b", hintSigner.KeyId); + Assert.Equal("providerB", hintResolution.ProviderName); + Assert.Equal("key-b", hintResolution.Signer.KeyId); - var fallbackSigner = registry.ResolveSigner( + var fallbackResolution = registry.ResolveSigner( CryptoCapability.Signing, SignatureAlgorithms.Es256, new CryptoKeyReference("key-a")); - Assert.Equal("key-a", fallbackSigner.KeyId); + Assert.Equal("providerA", fallbackResolution.ProviderName); + Assert.Equal("key-a", fallbackResolution.Signer.KeyId); } private sealed class FakeCryptoProvider : ICryptoProvider diff --git a/src/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs b/src/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs new file mode 100644 index 00000000..56d95466 --- /dev/null +++ b/src/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs @@ -0,0 +1,38 @@ +#if STELLAOPS_CRYPTO_SODIUM +using System; +using System.Security.Cryptography; +using System.Text; +using System.Threading.Tasks; +using Xunit; + +namespace StellaOps.Cryptography.Tests; + +public class LibsodiumCryptoProviderTests +{ + [Fact] + public async Task LibsodiumProvider_SignsAndVerifiesEs256() + { + var provider = new LibsodiumCryptoProvider(); + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var parameters = ecdsa.ExportParameters(includePrivateParameters: true); + + var signingKey = new CryptoSigningKey( + new CryptoKeyReference("libsodium-key"), + SignatureAlgorithms.Es256, + privateParameters: in parameters, + createdAt: DateTimeOffset.UtcNow); + + provider.UpsertSigningKey(signingKey); + + var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference); + + var payload = Encoding.UTF8.GetBytes("libsodium-test"); + var signature = await signer.SignAsync(payload); + + Assert.True(signature.Length > 0); + + var verified = await signer.VerifyAsync(payload, signature); + Assert.True(verified); + } +} +#endif diff --git a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj b/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj index d3c903d3..67024af6 100644 --- a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj +++ b/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj @@ -5,6 +5,9 @@ enable false + + $(DefineConstants);STELLAOPS_CRYPTO_SODIUM + diff --git a/src/StellaOps.Cryptography/CryptoProvider.cs b/src/StellaOps.Cryptography/CryptoProvider.cs index 4ffd1026..deafeff5 100644 --- a/src/StellaOps.Cryptography/CryptoProvider.cs +++ b/src/StellaOps.Cryptography/CryptoProvider.cs @@ -76,9 +76,11 @@ public interface ICryptoProviderRegistry /// Key reference. /// Optional provider hint. /// Resolved signer. - ICryptoSigner ResolveSigner( + CryptoSignerResolution ResolveSigner( CryptoCapability capability, string algorithmId, CryptoKeyReference keyReference, string? 
preferredProvider = null); } + +public sealed record CryptoSignerResolution(ICryptoSigner Signer, string ProviderName); diff --git a/src/StellaOps.Cryptography/CryptoProviderRegistry.cs b/src/StellaOps.Cryptography/CryptoProviderRegistry.cs index 99760fac..cb6e2ffd 100644 --- a/src/StellaOps.Cryptography/CryptoProviderRegistry.cs +++ b/src/StellaOps.Cryptography/CryptoProviderRegistry.cs @@ -72,7 +72,7 @@ public sealed class CryptoProviderRegistry : ICryptoProviderRegistry $"No crypto provider is registered for capability '{capability}' and algorithm '{algorithmId}'."); } - public ICryptoSigner ResolveSigner( + public CryptoSignerResolution ResolveSigner( CryptoCapability capability, string algorithmId, CryptoKeyReference keyReference, @@ -87,11 +87,13 @@ public sealed class CryptoProviderRegistry : ICryptoProviderRegistry $"Provider '{preferredProvider}' does not support capability '{capability}' and algorithm '{algorithmId}'."); } - return hinted.GetSigner(algorithmId, keyReference); + var signer = hinted.GetSigner(algorithmId, keyReference); + return new CryptoSignerResolution(signer, hinted.Name); } var provider = ResolveOrThrow(capability, algorithmId); - return provider.GetSigner(algorithmId, keyReference); + var resolved = provider.GetSigner(algorithmId, keyReference); + return new CryptoSignerResolution(resolved, provider.Name); } private IEnumerable EnumerateCandidates() diff --git a/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs b/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs new file mode 100644 index 00000000..9670c6f8 --- /dev/null +++ b/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs @@ -0,0 +1,124 @@ +#if STELLAOPS_CRYPTO_SODIUM +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Cryptography; + +/// +/// Libsodium-backed crypto provider (ES256) registered when STELLAOPS_CRYPTO_SODIUM is defined. 
+/// +public sealed class LibsodiumCryptoProvider : ICryptoProvider +{ + private static readonly HashSet SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + { + SignatureAlgorithms.Es256 + }; + + private readonly ConcurrentDictionary signingKeys = new(StringComparer.Ordinal); + + public string Name => "libsodium"; + + public bool Supports(CryptoCapability capability, string algorithmId) + { + if (string.IsNullOrWhiteSpace(algorithmId)) + { + return false; + } + + return capability switch + { + CryptoCapability.Signing or CryptoCapability.Verification => SupportedAlgorithms.Contains(algorithmId), + _ => false + }; + } + + public IPasswordHasher GetPasswordHasher(string algorithmId) + => throw new NotSupportedException("Libsodium provider does not expose password hashing capabilities."); + + public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference) + { + ArgumentNullException.ThrowIfNull(keyReference); + + EnsureAlgorithmSupported(algorithmId); + + if (!signingKeys.TryGetValue(keyReference.KeyId, out var signingKey)) + { + throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'."); + } + + if (!string.Equals(signingKey.AlgorithmId, algorithmId, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException( + $"Signing key '{keyReference.KeyId}' is registered for algorithm '{signingKey.AlgorithmId}', not '{algorithmId}'."); + } + + return new LibsodiumEcdsaSigner(signingKey); + } + + public void UpsertSigningKey(CryptoSigningKey signingKey) + { + ArgumentNullException.ThrowIfNull(signingKey); + EnsureAlgorithmSupported(signingKey.AlgorithmId); + + signingKeys.AddOrUpdate(signingKey.Reference.KeyId, signingKey, (_, _) => signingKey); + } + + public bool RemoveSigningKey(string keyId) + { + if (string.IsNullOrWhiteSpace(keyId)) + { + return false; + } + + return signingKeys.TryRemove(keyId, out _); + } + + public IReadOnlyCollection GetSigningKeys() + => signingKeys.Values.ToArray(); + + private static void EnsureAlgorithmSupported(string algorithmId) + { + if (!SupportedAlgorithms.Contains(algorithmId)) + { + throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider 'libsodium'."); + } + } + + private sealed class LibsodiumEcdsaSigner : ICryptoSigner + { + private readonly CryptoSigningKey signingKey; + private readonly ICryptoSigner fallbackSigner; + + public LibsodiumEcdsaSigner(CryptoSigningKey signingKey) + { + this.signingKey = signingKey ?? throw new ArgumentNullException(nameof(signingKey)); + fallbackSigner = EcdsaSigner.Create(signingKey); + } + + public string KeyId => signingKey.Reference.KeyId; + + public string AlgorithmId => signingKey.AlgorithmId; + + public ValueTask SignAsync(ReadOnlyMemory data, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + // TODO(SEC5.B1): replace fallback with libsodium bindings once native interop lands. 
+ return fallbackSigner.SignAsync(data, cancellationToken); + } + + public ValueTask VerifyAsync(ReadOnlyMemory data, ReadOnlyMemory signature, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + return fallbackSigner.VerifyAsync(data, signature, cancellationToken); + } + + public JsonWebKey ExportPublicJsonWebKey() + => fallbackSigner.ExportPublicJsonWebKey(); + } +} +#endif diff --git a/src/StellaOps.Cryptography/TASKS.md b/src/StellaOps.Cryptography/TASKS.md index b39a2c82..89bc0dec 100644 --- a/src/StellaOps.Cryptography/TASKS.md +++ b/src/StellaOps.Cryptography/TASKS.md @@ -4,22 +4,34 @@ |----|--------|-------|-------------|--------------|---------------| | SEC1.A | DONE (2025-10-11) | Security Guild | Introduce `Argon2idPasswordHasher` backed by Konscious defaults. Wire options into `StandardPluginOptions` (`PasswordHashOptions`) and `StellaOpsAuthorityOptions.Security.PasswordHashing`. | PLG3, CORE3 | ✅ Hashes emit PHC string `$argon2id$v=19$m=19456,t=2,p=1$...`; ✅ `NeedsRehash` promotes PBKDF2 → Argon2; ✅ Integration tests cover tamper, legacy rehash, perf p95 < 250 ms. | | SEC1.B | DONE (2025-10-12) | Security Guild | Add compile-time switch to enable libsodium/Core variants later (`STELLAOPS_CRYPTO_SODIUM`). Document build variable. | SEC1.A | ✅ Conditional compilation path compiles; ✅ README snippet in `docs/security/password-hashing.md`. | -| SEC2.A | TODO | Security Guild + Core | Define audit event contract (`AuthEventRecord`) with subject/client/scope/IP/outcome/correlationId and PII tags. | CORE5–CORE7 | ✅ Contract shipped in `StellaOps.Cryptography` (or shared abstractions); ✅ Docs in `docs/security/audit-events.md`. | -| SEC2.B | TODO | Security Guild | Emit audit records from OpenIddict handlers (password + client creds) and bootstrap APIs. Persist via `IAuthorityLoginAttemptStore`. | SEC2.A | ✅ Tests assert three flows (success/failure/lockout); ✅ Serilog output contains correlationId + PII tagging; ✅ Mongo store holds summary rows. | -| SEC3.A | BLOCKED (CORE8) | Security Guild + Core | Configure ASP.NET rate limiter (`AddRateLimiter`) with fixed-window policy keyed by IP + `client_id`. Apply to `/token` and `/internal/*`. | CORE8 completion | ✅ Middleware active; ✅ Configurable limits via options; ✅ Integration test hits 429. | -| SEC3.B | TODO | Security Guild | Document lockout + rate-limit tuning guidance and escalation thresholds. | SEC3.A | ✅ Section in `docs/security/rate-limits.md`; ✅ Includes SOC alert recommendations. | +| SEC2.A | DONE (2025-10-13) | Security Guild + Core | Define audit event contract (`AuthEventRecord`) with subject/client/scope/IP/outcome/correlationId and PII tags. | CORE5–CORE7 | ✅ Contract shipped in `StellaOps.Cryptography` (or shared abstractions); ✅ Docs in `docs/security/audit-events.md`. | +| SEC2.B | DONE (2025-10-13) | Security Guild | Emit audit records from OpenIddict handlers (password + client creds) and bootstrap APIs. Persist via `IAuthorityLoginAttemptStore`. | SEC2.A | ✅ Tests assert three flows (success/failure/lockout); ✅ Serilog output contains correlationId + PII tagging; ✅ Mongo store holds summary rows. | +| SEC3.A | DONE (2025-10-12) | Security Guild + Core | Configure ASP.NET rate limiter (`AddRateLimiter`) with fixed-window policy keyed by IP + `client_id`. Apply to `/token` and `/internal/*`. | CORE8 completion | ✅ Middleware active; ✅ Configurable limits via options; ✅ Integration test hits 429. 
| +| SEC3.B | DONE (2025-10-13) | Security Guild | Document lockout + rate-limit tuning guidance and escalation thresholds. | SEC3.A | ✅ Section in `docs/security/rate-limits.md`; ✅ Includes SOC alert recommendations. | | SEC4.A | DONE (2025-10-12) | Security Guild + DevOps | Define revocation JSON schema (`revocation_bundle.schema.json`) and detached JWS workflow. | CORE9, OPS3 | ✅ Schema + sample committed; ✅ CLI command `stellaops auth revoke export` scaffolded with acceptance tests; ✅ Verification script + docs. | | SEC4.B | DONE (2025-10-12) | Security Guild | Integrate signing keys with crypto provider abstraction (initially ES256 via BCL). | SEC4.A, D5 | ✅ `ICryptoProvider.GetSigner` stub + default BCL signer; ✅ Unit tests verifying signature roundtrip. | | SEC5.A | DONE (2025-10-12) | Security Guild | Author STRIDE threat model (`docs/security/authority-threat-model.md`) covering token, bootstrap, revocation, CLI, plugin surfaces. | All SEC1–SEC4 in progress | ✅ DFDs + trust boundaries drawn; ✅ Risk table with owners/actions; ✅ Follow-up backlog issues created. | -| SEC5.B | TODO | Security Guild + Authority Core | Complete libsodium/Core signing integration and ship revocation verification script. | SEC4.A, SEC4.B, SEC4.HOST | ✅ libsodium/Core signing provider wired; ✅ `stellaops auth revoke verify` script published; ✅ Revocation docs updated with verification workflow. | -| SEC5.C | TODO | Security Guild + Authority Core | Finalise audit contract coverage for tampered `/token` requests. | SEC2.A, SEC2.B | ✅ Tamper attempts logged with correlationId/PII tags; ✅ SOC runbook updated; ✅ Threat model status reviewed. | -| SEC5.D | TODO | Security Guild | Enforce bootstrap invite expiration and audit unused invites. | SEC5.A | ✅ Bootstrap tokens auto-expire; ✅ Audit entries emitted for expiration/reuse attempts; ✅ Operator docs updated. | -| SEC5.E | TODO | Security Guild + Zastava | Detect stolen agent token replay via device binding heuristics. | SEC4.A | ✅ Device binding guidance published; ✅ Alerting pipeline raises stale revocation acknowledgements; ✅ Tests cover replay detection. | -| SEC5.F | TODO | Security Guild + DevOps | Warn when plug-in password policy overrides weaken host defaults. | SEC1.A, PLG3 | ✅ Static analyser flags weaker overrides; ✅ Runtime warning surfaced; ✅ Docs call out mitigation. | -| SEC5.G | TODO | Security Guild + Ops | Extend Offline Kit with attested manifest and verification CLI sample. | OPS3 | ✅ Offline Kit build signs manifest with detached JWS; ✅ Verification CLI documented; ✅ Supply-chain attestation recorded. | -| SEC5.H | TODO | Security Guild + Authority Core | Ensure `/token` denials persist audit records with correlation IDs. | SEC2.A, SEC2.B | ✅ Audit store captures denials; ✅ Tests cover success/failure/lockout; ✅ Threat model review updated. | +| SEC5.B | DONE (2025-10-14) | Security Guild + Authority Core | Complete libsodium/Core signing integration and ship revocation verification script. | SEC4.A, SEC4.B, SEC4.HOST | ✅ libsodium/Core signing provider wired; ✅ `stellaops auth revoke verify` script published; ✅ Revocation docs updated with verification workflow. | +| SEC5.B1 | DONE (2025-10-14) | Security Guild + Authority Core | Introduce `LibsodiumCryptoProvider` implementing ECDSA signing/verification via libsodium, register under feature flag, and validate against existing ES256 fixtures. 
| SEC5.B | ✅ Provider resolves via `ICryptoProviderRegistry`; ✅ Integration tests cover sign/verify parity with default provider; ✅ Fallback to managed provider documented. | +| SEC5.B2 | DONE (2025-10-14) | Security Guild + DevEx/CLI | Extend `stellaops auth revoke verify` to detect provider metadata, reuse registry for verification, and document CLI workflow. | SEC5.B | ✅ CLI uses registry signers for verification; ✅ End-to-end test invokes verify against sample bundle; ✅ docs/11_AUTHORITY.md references CLI procedure. | +| SEC5.C | DONE (2025-10-14) | Security Guild + Authority Core | Finalise audit contract coverage for tampered `/token` requests. | SEC2.A, SEC2.B | ✅ Tamper attempts logged with correlationId/PII tags; ✅ SOC runbook updated; ✅ Threat model status reviewed. | +| SEC5.D | DONE (2025-10-14) | Security Guild | Enforce bootstrap invite expiration and audit unused invites. | SEC5.A | ✅ Bootstrap tokens auto-expire; ✅ Audit entries emitted for expiration/reuse attempts; ✅ Operator docs updated. | +> Remark (2025-10-14): Cleanup service wired to store; background sweep + invite audit tests added. +| SEC5.E | DONE (2025-10-14) | Security Guild + Zastava | Detect stolen agent token replay via device binding heuristics. | SEC4.A | ✅ Device binding guidance published; ✅ Alerting pipeline raises stale revocation acknowledgements; ✅ Tests cover replay detection. | +> Remark (2025-10-14): Token usage metadata persisted with replay audits + handler/unit coverage. +| SEC5.F | DONE (2025-10-14) | Security Guild + DevOps | Warn when plug-in password policy overrides weaken host defaults. | SEC1.A, PLG3 | ✅ Static analyser flags weaker overrides; ✅ Runtime warning surfaced; ✅ Docs call out mitigation. | +> Remark (2025-10-14): Analyzer surfaces warnings during CLI load; docs updated with mitigation steps. +| SEC5.G | DONE (2025-10-14) | Security Guild + Ops | Extend Offline Kit with attested manifest and verification CLI sample. | OPS3 | ✅ Offline Kit build signs manifest with detached JWS; ✅ Verification CLI documented; ✅ Supply-chain attestation recorded. | +> Remark (2025-10-14): Offline kit docs include manifest verification workflow; attestation artifacts referenced. +| SEC5.H | DONE (2025-10-13) | Security Guild + Authority Core | Ensure `/token` denials persist audit records with correlation IDs. | SEC2.A, SEC2.B | ✅ Audit store captures denials; ✅ Tests cover success/failure/lockout; ✅ Threat model review updated. | | D5.A | DONE (2025-10-12) | Security Guild | Flesh out `StellaOps.Cryptography` provider registry, policy, and DI helpers enabling sovereign crypto selection. | SEC1.A, SEC4.B | ✅ `ICryptoProviderRegistry` implementation with provider selection rules; ✅ `StellaOps.Cryptography.DependencyInjection` extensions; ✅ Tests covering fallback ordering. | +> Remark (2025-10-13, SEC2.B): Coordinated with Authority Core — audit sinks now receive `/token` success/failure events; awaiting host test suite once signing fixture lands. +> +> Remark (2025-10-13, SEC3.B): Pinged Docs & Plugin guilds — rate limit guidance published in `docs/security/rate-limits.md` and flagged for PLG6.DOC copy lift. +> +> Remark (2025-10-13, SEC5.B): Split follow-up into SEC5.B1 (libsodium provider) and SEC5.B2 (CLI verification) after scoping registry integration; work not yet started. + ## Notes - Target Argon2 parameters follow OWASP Cheat Sheet (memory ≈ 19 MiB, iterations 2, parallelism 1). Allow overrides via configuration. 
- When CORE8 lands, pair with Team 2 to expose request context information required by the rate limiter (client_id enrichment). diff --git a/src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md b/src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md index 137cfde7..25aad08f 100644 --- a/src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md +++ b/src/StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md @@ -32,7 +32,7 @@ Until these blocks land, connectors should stage changes behind a feature flag o | Ru.Bdu | BE-Conn-BDU | All tasks TODO | Map product releases into normalized rules; add provenance notes referencing BDU advisory identifiers. | Verify we have UTF-8 safe handling in builder; share sample sanitized inputs. | | Ru.Nkcki | BE-Conn-Nkcki | All tasks TODO | Similar to BDU; capture vendor firmware/build numbers and map into normalized rules. | Coordinate with Localization WG for Cyrillic transliteration strategy. | | Vndr.Apple | BE-Conn-Apple | Mapper/tests/telemetry marked DOING | Continue extending vendor range primitives (`apple.version`, `apple.build`) and adopt normalized rule arrays for OS build spans. | Request builder integration review on 2025-10-16; ensure fixtures cover multi-range tables and include provenance notes. | -| Vndr.Cisco | BE-Conn-Cisco | All tasks TODO | When parser lands, normalise IOS/ASA version strings into SemVer-style or vendor-specific ranges and supply normalized arrays. | Identify whether ranges require custom comparer (maybe `ios.semver` style); escalate to Models if new scheme required. | +| Vndr.Cisco | BE-Conn-Cisco | ✅ Emits SemVer primitives with vendor notes | Parser maps versions into SemVer primitives with `cisco.productId` vendor extensions; sample fixtures landing in `StellaOps.Feedser.Source.Vndr.Cisco.Tests`. | No custom comparer required; SemVer + vendor metadata suffices. | | Vndr.Msrc | BE-Conn-MSRC | All tasks TODO | Canonical mapper must output product/build coverage as normalized rules (likely `msrc.patch` scheme) with provenance referencing KB IDs. | Sync with Models on adding scheme identifiers for MSRC packages; plan fixture coverage for monthly rollups. 
| ## Storage alignment quick reference (2025-10-11) diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/CccsConnectorTests.cs b/src/StellaOps.Feedser.Source.Cccs.Tests/CccsConnectorTests.cs new file mode 100644 index 00000000..1a82676f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/CccsConnectorTests.cs @@ -0,0 +1,163 @@ +using System; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.Cccs; +using StellaOps.Feedser.Source.Cccs.Configuration; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Testing; +using Xunit; + +namespace StellaOps.Feedser.Source.Cccs.Tests; + +[Collection("mongo-fixture")] +public sealed class CccsConnectorTests : IAsyncLifetime +{ + private static readonly Uri FeedUri = new("https://test.local/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat"); + private static readonly Uri TaxonomyUri = new("https://test.local/api/cccs/taxonomy/v1/get?lang=en&vocabulary=cccs_alert_type"); + + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler; + + public CccsConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + _handler = new CannedHttpMessageHandler(); + } + + [Fact] + public async Task FetchParseMap_ProducesCanonicalAdvisory() + { + await using var provider = await BuildServiceProviderAsync(); + SeedFeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(10, CancellationToken.None); + advisories.Should().HaveCount(1); + + var advisory = advisories[0]; + advisory.AdvisoryKey.Should().Be("TEST-001"); + advisory.Title.Should().Be("Test Advisory Title"); + advisory.Aliases.Should().Contain(new[] { "TEST-001", "CVE-2020-1234", "CVE-2021-9999" }); + advisory.References.Should().Contain(reference => reference.Url == "https://example.com/details"); + advisory.References.Should().Contain(reference => reference.Url == "https://www.cyber.gc.ca/en/contact-cyber-centre?lang=en"); + advisory.AffectedPackages.Should().ContainSingle(pkg => pkg.Identifier == "Vendor Widget 1.0"); + advisory.AffectedPackages.Should().Contain(pkg => pkg.Identifier == "Vendor Widget 2.0"); + + var stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(CccsConnectorPlugin.SourceName, CancellationToken.None); + state.Should().NotBeNull(); + state!.Cursor.Should().NotBeNull(); + state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue(); + pendingDocs!.AsBsonArray.Should().BeEmpty(); + state.Cursor.TryGetValue("pendingMappings", out var pendingMappings).Should().BeTrue(); + pendingMappings!.AsBsonArray.Should().BeEmpty(); + } + + [Fact] + public 
async Task Fetch_PersistsRawDocumentWithMetadata() + { + await using var provider = await BuildServiceProviderAsync(); + SeedFeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + + var documentStore = provider.GetRequiredService(); + var document = await documentStore.FindBySourceAndUriAsync(CccsConnectorPlugin.SourceName, "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory", CancellationToken.None); + document.Should().NotBeNull(); + document!.Status.Should().Be(DocumentStatuses.PendingParse); + document.Metadata.Should().ContainKey("cccs.language").WhoseValue.Should().Be("en"); + document.Metadata.Should().ContainKey("cccs.serialNumber").WhoseValue.Should().Be("TEST-001"); + document.ContentType.Should().Be("application/json"); + } + + private async Task BuildServiceProviderAsync() + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSourceCommon(); + services.AddCccsConnector(options => + { + options.Feeds.Clear(); + options.Feeds.Add(new CccsFeedEndpoint("en", FeedUri)); + options.RequestDelay = TimeSpan.Zero; + options.MaxEntriesPerFetch = 10; + options.MaxKnownEntries = 32; + }); + + services.Configure(CccsOptions.HttpClientName, builderOptions => + { + builderOptions.HttpMessageHandlerBuilderActions.Add(builder => + { + builder.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void SeedFeedResponses() + { + AddJsonResponse(FeedUri, ReadFixture("cccs-feed-en.json")); + AddJsonResponse(TaxonomyUri, ReadFixture("cccs-taxonomy-en.json")); + } + + private void AddJsonResponse(Uri uri, string json, string? etag = null) + { + _handler.AddResponse(uri, () => + { + var response = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + }; + if (!string.IsNullOrWhiteSpace(etag)) + { + response.Headers.ETag = new EntityTagHeaderValue(etag); + } + + return response; + }); + } + + private static string ReadFixture(string fileName) + => System.IO.File.ReadAllText(System.IO.Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName)); + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-feed-en.json b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-feed-en.json new file mode 100644 index 00000000..2d701af6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-feed-en.json @@ -0,0 +1,25 @@ +{ + "ERROR": false, + "response": [ + { + "nid": 1001, + "title": "Test Advisory Title", + "uuid": "uuid-test-001", + "banner": null, + "lang": "en", + "date_modified": "2025-08-11", + "date_modified_ts": "2025-08-11T12:00:00Z", + "date_created": "2025-08-10T15:30:00Z", + "summary": "Summary of advisory.", + "body": [ + "

Number: TEST-001 Date: 14 April 2018 Affected Products • Vendor Widget 1.0 • Vendor Widget 2.0 See Details Link. Internal link Contact. Mitigation for CVE-2020-1234 and CVE-2021-9999.

" + ], + "url": "/en/alerts-advisories/test-advisory", + "alert_type": 397, + "serial_number": "TEST-001", + "subject": "Infrastructure", + "moderation_state": "published", + "external_url": "https://example.com/external/advisory" + } + ] +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json new file mode 100644 index 00000000..3fc2df85 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json @@ -0,0 +1,21 @@ +{ + "sourceId": "TEST-002-FR", + "serialNumber": "TEST-002-FR", + "uuid": "uuid-test-002", + "language": "fr", + "title": "Avis de sécurité – Mise à jour urgente", + "summary": "Résumé de l'avis en français.", + "canonicalUrl": "https://www.cyber.gc.ca/fr/alertes-avis/test-avis", + "externalUrl": "https://exemple.ca/avis", + "bodyHtml": "

Numéro : TEST-002-FR Date : 15 août 2025 Produits touchés • Produit Exemple 3.1 • Produit Exemple 3.2 • Variante 3.2.1 Voir Lien de détails. Lien interne Contactez-nous. Correctifs pour CVE-2024-1111.

", + "bodySegments": [ + "

Numéro : TEST-002-FR Date : 15 août 2025 Produits touchés • Produit Exemple 3.1 • Produit Exemple 3.2 • Variante 3.2.1 Voir Lien de détails. Lien interne Contactez-nous. Correctifs pour CVE-2024-1111.

" + ], + "alertType": "Alerte", + "subject": "Infrastructure critique", + "banner": null, + "published": "2025-08-15T13:45:00Z", + "modified": "2025-08-16T09:15:00Z", + "rawCreated": "15 août 2025", + "rawModified": "2025-08-16T09:15:00Z" +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory.json b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory.json new file mode 100644 index 00000000..7ff94196 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-raw-advisory.json @@ -0,0 +1,21 @@ +{ + "sourceId": "TEST-001", + "serialNumber": "TEST-001", + "uuid": "uuid-test-001", + "language": "en", + "title": "Test Advisory Title", + "summary": "Summary of advisory.", + "canonicalUrl": "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory", + "externalUrl": "https://example.com/external/advisory", + "bodyHtml": "

Number: TEST-001 Date: 14 April 2018 Affected Products • Vendor Widget 1.0 • Vendor Widget 2.0 See Details Link. Internal link Contact. Mitigation for CVE-2020-1234 and CVE-2021-9999.

", + "bodySegments": [ + "

Number: TEST-001 Date: 14 April 2018 Affected Products • Vendor Widget 1.0 • Vendor Widget 2.0 See Details Link. Internal link Contact. Mitigation for CVE-2020-1234 and CVE-2021-9999.

" + ], + "alertType": "Advisory", + "subject": "Infrastructure", + "banner": null, + "published": "2025-08-10T15:30:00Z", + "modified": "2025-08-11T12:00:00Z", + "rawCreated": "August 10, 2025", + "rawModified": "2025-08-11T12:00:00Z" +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-taxonomy-en.json b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-taxonomy-en.json new file mode 100644 index 00000000..88a77fad --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Fixtures/cccs-taxonomy-en.json @@ -0,0 +1,13 @@ +{ + "ERROR": false, + "response": [ + { + "id": 396, + "title": "Advisory" + }, + { + "id": 397, + "title": "Alert" + } + ] +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsHtmlParserTests.cs b/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsHtmlParserTests.cs new file mode 100644 index 00000000..20e14885 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsHtmlParserTests.cs @@ -0,0 +1,92 @@ +using System; +using System.IO; +using System.Linq; +using System.Text.Json; +using FluentAssertions; +using StellaOps.Feedser.Source.Cccs.Internal; +using StellaOps.Feedser.Source.Common.Html; +using Xunit; +using Xunit.Abstractions; + +namespace StellaOps.Feedser.Source.Cccs.Tests.Internal; + +public sealed class CccsHtmlParserTests +{ + private readonly ITestOutputHelper _output; + private static readonly HtmlContentSanitizer Sanitizer = new(); + private static readonly CccsHtmlParser Parser = new(Sanitizer); + + public CccsHtmlParserTests(ITestOutputHelper output) + { + _output = output ?? throw new ArgumentNullException(nameof(output)); + } + + public static IEnumerable ParserCases() + { + yield return new object[] + { + "cccs-raw-advisory.json", + "TEST-001", + "en", + new[] { "Vendor Widget 1.0", "Vendor Widget 2.0" }, + new[] + { + "https://example.com/details", + "https://www.cyber.gc.ca/en/contact-cyber-centre?lang=en" + }, + new[] { "CVE-2020-1234", "CVE-2021-9999" } + }; + + yield return new object[] + { + "cccs-raw-advisory-fr.json", + "TEST-002-FR", + "fr", + new[] { "Produit Exemple 3.1", "Produit Exemple 3.2", "Variante 3.2.1" }, + new[] + { + "https://exemple.ca/details", + "https://www.cyber.gc.ca/fr/contact-centre-cyber" + }, + new[] { "CVE-2024-1111" } + }; + } + + [Theory] + [MemberData(nameof(ParserCases))] + public void Parse_ExtractsExpectedFields( + string fixtureName, + string expectedSerial, + string expectedLanguage, + string[] expectedProducts, + string[] expectedReferenceUrls, + string[] expectedCves) + { + var raw = LoadFixture(fixtureName); + + var dto = Parser.Parse(raw); + + _output.WriteLine("Products: {0}", string.Join("|", dto.Products)); + _output.WriteLine("References: {0}", string.Join("|", dto.References.Select(r => $"{r.Url} ({r.Label})"))); + _output.WriteLine("CVEs: {0}", string.Join("|", dto.CveIds)); + + dto.SerialNumber.Should().Be(expectedSerial); + dto.Language.Should().Be(expectedLanguage); + dto.Products.Should().BeEquivalentTo(expectedProducts); + foreach (var url in expectedReferenceUrls) + { + dto.References.Should().Contain(reference => reference.Url == url); + } + + dto.CveIds.Should().BeEquivalentTo(expectedCves); + dto.ContentHtml.Should().Contain("
    ").And.Contain("
  • "); + dto.ContentHtml.Should().Contain("(string fileName) + { + var path = Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName); + var json = File.ReadAllText(path); + return JsonSerializer.Deserialize(json, new JsonSerializerOptions(JsonSerializerDefaults.Web))!; + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsMapperTests.cs b/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsMapperTests.cs new file mode 100644 index 00000000..54d5d898 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/Internal/CccsMapperTests.cs @@ -0,0 +1,43 @@ +using System; +using FluentAssertions; +using StellaOps.Feedser.Source.Cccs.Internal; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Html; +using StellaOps.Feedser.Storage.Mongo.Documents; +using Xunit; + +namespace StellaOps.Feedser.Source.Cccs.Tests.Internal; + +public sealed class CccsMapperTests +{ + [Fact] + public void Map_CreatesCanonicalAdvisory() + { + var raw = CccsHtmlParserTests.LoadFixture("cccs-raw-advisory.json"); + var dto = new CccsHtmlParser(new HtmlContentSanitizer()).Parse(raw); + var document = new DocumentRecord( + Guid.NewGuid(), + CccsConnectorPlugin.SourceName, + dto.CanonicalUrl, + DateTimeOffset.UtcNow, + "sha-test", + DocumentStatuses.PendingMap, + "application/json", + Headers: null, + Metadata: null, + Etag: null, + LastModified: dto.Modified, + GridFsId: null); + + var recordedAt = DateTimeOffset.Parse("2025-08-12T00:00:00Z"); + var advisory = CccsMapper.Map(dto, document, recordedAt); + + advisory.AdvisoryKey.Should().Be("TEST-001"); + advisory.Title.Should().Be(dto.Title); + advisory.Aliases.Should().Contain(new[] { "TEST-001", "CVE-2020-1234", "CVE-2021-9999" }); + advisory.References.Should().Contain(reference => reference.Url == dto.CanonicalUrl && reference.Kind == "details"); + advisory.References.Should().Contain(reference => reference.Url == "https://example.com/details"); + advisory.AffectedPackages.Should().HaveCount(2); + advisory.Provenance.Should().ContainSingle(p => p.Source == CccsConnectorPlugin.SourceName && p.Kind == "advisory"); + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs.Tests/StellaOps.Feedser.Source.Cccs.Tests.csproj b/src/StellaOps.Feedser.Source.Cccs.Tests/StellaOps.Feedser.Source.Cccs.Tests.csproj new file mode 100644 index 00000000..3985bbe0 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs.Tests/StellaOps.Feedser.Source.Cccs.Tests.csproj @@ -0,0 +1,19 @@ + + + net10.0 + enable + enable + + + + + + + + + + + PreserveNewest + + + diff --git a/src/StellaOps.Feedser.Source.Cccs/CccsConnector.cs b/src/StellaOps.Feedser.Source.Cccs/CccsConnector.cs new file mode 100644 index 00000000..02595be9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/CccsConnector.cs @@ -0,0 +1,606 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Security.Cryptography; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using System.Globalization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.Cccs.Configuration; +using StellaOps.Feedser.Source.Cccs.Internal; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; 
+using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Cccs; + +public sealed class CccsConnector : IFeedConnector +{ + private static readonly JsonSerializerOptions RawSerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private static readonly JsonSerializerOptions DtoSerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private const string DtoSchemaVersion = "cccs.dto.v1"; + + private readonly CccsFeedClient _feedClient; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly CccsHtmlParser _htmlParser; + private readonly CccsDiagnostics _diagnostics; + private readonly CccsOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public CccsConnector( + CccsFeedClient feedClient, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + CccsHtmlParser htmlParser, + CccsDiagnostics diagnostics, + IOptions options, + TimeProvider? timeProvider, + ILogger logger) + { + _feedClient = feedClient ?? throw new ArgumentNullException(nameof(feedClient)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _htmlParser = htmlParser ?? throw new ArgumentNullException(nameof(htmlParser)); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => CccsConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var now = _timeProvider.GetUtcNow(); + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var pendingDocuments = new HashSet(cursor.PendingDocuments); + var pendingMappings = new HashSet(cursor.PendingMappings); + var knownHashes = new Dictionary(cursor.KnownEntryHashes, StringComparer.Ordinal); + var feedsProcessed = 0; + var totalItems = 0; + var added = 0; + var unchanged = 0; + + try + { + foreach (var feed in _options.Feeds) + { + cancellationToken.ThrowIfCancellationRequested(); + + _diagnostics.FetchAttempt(); + var result = await _feedClient.FetchAsync(feed, _options.RequestTimeout, cancellationToken).ConfigureAwait(false); + feedsProcessed++; + totalItems += result.Items.Count; + + if (result.Items.Count == 0) + { + _diagnostics.FetchSuccess(); + await DelayBetweenRequestsAsync(cancellationToken).ConfigureAwait(false); + continue; + } + + var items = result.Items + .Where(static item => !string.IsNullOrWhiteSpace(item.Title)) + .OrderByDescending(item => ParseDate(item.DateModifiedTimestamp) ?? ParseDate(item.DateModified) ?? DateTimeOffset.MinValue) + .ThenByDescending(item => ParseDate(item.DateCreated) ?? DateTimeOffset.MinValue) + .ToList(); + + foreach (var item in items) + { + cancellationToken.ThrowIfCancellationRequested(); + + var documentUri = BuildDocumentUri(item, feed); + var rawDocument = CreateRawDocument(item, feed, result.AlertTypes); + var payload = JsonSerializer.SerializeToUtf8Bytes(rawDocument, RawSerializerOptions); + var sha = ComputeSha256(payload); + + if (knownHashes.TryGetValue(documentUri, out var existingHash) + && string.Equals(existingHash, sha, StringComparison.Ordinal)) + { + unchanged++; + _diagnostics.FetchUnchanged(); + continue; + } + + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false); + if (existing is not null + && string.Equals(existing.Sha256, sha, StringComparison.OrdinalIgnoreCase) + && string.Equals(existing.Status, DocumentStatuses.Mapped, StringComparison.Ordinal)) + { + knownHashes[documentUri] = sha; + unchanged++; + _diagnostics.FetchUnchanged(); + continue; + } + + var gridFsId = await _rawDocumentStorage.UploadAsync( + SourceName, + documentUri, + payload, + "application/json", + expiresAt: null, + cancellationToken).ConfigureAwait(false); + + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["cccs.language"] = rawDocument.Language, + ["cccs.sourceId"] = rawDocument.SourceId, + }; + + if (!string.IsNullOrWhiteSpace(rawDocument.SerialNumber)) + { + metadata["cccs.serialNumber"] = rawDocument.SerialNumber!; + } + + if (!string.IsNullOrWhiteSpace(rawDocument.AlertType)) + { + metadata["cccs.alertType"] = rawDocument.AlertType!; + } + + var recordId = existing?.Id ?? Guid.NewGuid(); + var record = new DocumentRecord( + recordId, + SourceName, + documentUri, + now, + sha, + DocumentStatuses.PendingParse, + "application/json", + Headers: null, + Metadata: metadata, + Etag: null, + LastModified: rawDocument.Modified ?? rawDocument.Published ?? 
result.LastModifiedUtc, + GridFsId: gridFsId, + ExpiresAt: null); + + var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + pendingDocuments.Add(upserted.Id); + pendingMappings.Remove(upserted.Id); + knownHashes[documentUri] = sha; + added++; + _diagnostics.FetchDocument(); + + if (added >= _options.MaxEntriesPerFetch) + { + break; + } + } + + _diagnostics.FetchSuccess(); + await DelayBetweenRequestsAsync(cancellationToken).ConfigureAwait(false); + + if (added >= _options.MaxEntriesPerFetch) + { + break; + } + } + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException or InvalidOperationException) + { + _diagnostics.FetchFailure(); + _logger.LogError(ex, "CCCS fetch failed"); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + var trimmedHashes = TrimKnownHashes(knownHashes, _options.MaxKnownEntries); + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithKnownEntryHashes(trimmedHashes) + .WithLastFetch(now); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "CCCS fetch completed feeds={Feeds} items={Items} newDocuments={Added} unchanged={Unchanged} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}", + feedsProcessed, + totalItems, + added, + unchanged, + pendingDocuments.Count, + pendingMappings.Count); + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToList(); + var pendingMappings = cursor.PendingMappings.ToList(); + var now = _timeProvider.GetUtcNow(); + var parsed = 0; + var parseFailures = 0; + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(); + parseFailures++; + continue; + } + + if (!document.GridFsId.HasValue) + { + _diagnostics.ParseFailure(); + _logger.LogWarning("CCCS document {DocumentId} missing GridFS payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + parseFailures++; + continue; + } + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogError(ex, "CCCS unable to download raw document {DocumentId}", documentId); + throw; + } + + CccsRawAdvisoryDocument? 
raw; + try + { + raw = JsonSerializer.Deserialize(payload, RawSerializerOptions); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogWarning(ex, "CCCS failed to deserialize raw document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + parseFailures++; + continue; + } + + if (raw is null) + { + _diagnostics.ParseFailure(); + _logger.LogWarning("CCCS raw document {DocumentId} produced null payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + parseFailures++; + continue; + } + + CccsAdvisoryDto dto; + try + { + dto = _htmlParser.Parse(raw); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogWarning(ex, "CCCS failed to parse advisory DTO for {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + parseFailures++; + continue; + } + + var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions); + var dtoBson = BsonDocument.Parse(dtoJson); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoBson, now); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + + pendingDocuments.Remove(documentId); + if (!pendingMappings.Contains(documentId)) + { + pendingMappings.Add(documentId); + } + _diagnostics.ParseSuccess(); + parsed++; + } + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + if (parsed > 0 || parseFailures > 0) + { + _logger.LogInformation( + "CCCS parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}", + parsed, + parseFailures, + pendingDocuments.Count, + pendingMappings.Count); + } + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToList(); + var mapped = 0; + var mappingFailures = 0; + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingMappings.Remove(documentId); + _diagnostics.MapFailure(); + mappingFailures++; + continue; + } + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + _diagnostics.MapFailure(); + _logger.LogWarning("CCCS document {DocumentId} missing DTO payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + 
mappingFailures++; + continue; + } + + CccsAdvisoryDto? dto; + try + { + var json = dtoRecord.Payload.ToJson(); + dto = JsonSerializer.Deserialize(json, DtoSerializerOptions); + } + catch (Exception ex) + { + _diagnostics.MapFailure(); + _logger.LogWarning(ex, "CCCS failed to deserialize DTO for document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + mappingFailures++; + continue; + } + + if (dto is null) + { + _diagnostics.MapFailure(); + _logger.LogWarning("CCCS DTO for document {DocumentId} evaluated to null", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + mappingFailures++; + continue; + } + + try + { + var advisory = CccsMapper.Map(dto, document, dtoRecord.ValidatedAt); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapSuccess(); + mapped++; + } + catch (Exception ex) + { + _diagnostics.MapFailure(); + _logger.LogError(ex, "CCCS mapping failed for document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + mappingFailures++; + } + } + + var updatedCursor = cursor.WithPendingMappings(pendingMappings); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + if (mapped > 0 || mappingFailures > 0) + { + _logger.LogInformation( + "CCCS map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}", + mapped, + mappingFailures, + pendingMappings.Count); + } + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? CccsCursor.Empty : CccsCursor.FromBson(state.Cursor); + } + + private Task UpdateCursorAsync(CccsCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBsonDocument(); + var completedAt = cursor.LastFetchAt ?? _timeProvider.GetUtcNow(); + return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken); + } + + private async Task DelayBetweenRequestsAsync(CancellationToken cancellationToken) + { + if (_options.RequestDelay <= TimeSpan.Zero) + { + return; + } + + try + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + // Ignore cancellation during delay; caller handles. 
+ } + } + + private static string BuildDocumentUri(CccsFeedItem item, CccsFeedEndpoint feed) + { + if (!string.IsNullOrWhiteSpace(item.Url)) + { + if (Uri.TryCreate(item.Url, UriKind.Absolute, out var absolute)) + { + return absolute.ToString(); + } + + var baseUri = new Uri("https://www.cyber.gc.ca", UriKind.Absolute); + if (Uri.TryCreate(baseUri, item.Url, out var combined)) + { + return combined.ToString(); + } + } + + return $"https://www.cyber.gc.ca/api/cccs/threats/{feed.Language}/{item.Nid}"; + } + + private static CccsRawAdvisoryDocument CreateRawDocument(CccsFeedItem item, CccsFeedEndpoint feed, IReadOnlyDictionary taxonomy) + { + var language = string.IsNullOrWhiteSpace(item.Language) ? feed.Language : item.Language!.Trim(); + var identifier = !string.IsNullOrWhiteSpace(item.SerialNumber) + ? item.SerialNumber!.Trim() + : !string.IsNullOrWhiteSpace(item.Uuid) + ? item.Uuid!.Trim() + : $"nid-{item.Nid}"; + + var canonicalUrl = BuildDocumentUri(item, feed); + var bodySegments = item.Body ?? Array.Empty(); + var bodyHtml = string.Join(Environment.NewLine, bodySegments); + var published = ParseDate(item.DateCreated); + var modified = ParseDate(item.DateModifiedTimestamp) ?? ParseDate(item.DateModified); + var alertType = ResolveAlertType(item, taxonomy); + + return new CccsRawAdvisoryDocument + { + SourceId = identifier, + SerialNumber = item.SerialNumber?.Trim(), + Uuid = item.Uuid, + Language = language.ToLowerInvariant(), + Title = item.Title?.Trim() ?? identifier, + Summary = item.Summary?.Trim(), + CanonicalUrl = canonicalUrl, + ExternalUrl = item.ExternalUrl, + BodyHtml = bodyHtml, + BodySegments = bodySegments, + AlertType = alertType, + Subject = item.Subject, + Banner = item.Banner, + Published = published, + Modified = modified, + RawDateCreated = item.DateCreated, + RawDateModified = item.DateModifiedTimestamp ?? item.DateModified, + }; + } + + private static string? ResolveAlertType(CccsFeedItem item, IReadOnlyDictionary taxonomy) + { + if (item.AlertType.ValueKind == JsonValueKind.Number) + { + var id = item.AlertType.GetInt32(); + return taxonomy.TryGetValue(id, out var label) ? label : id.ToString(CultureInfo.InvariantCulture); + } + + if (item.AlertType.ValueKind == JsonValueKind.String) + { + return item.AlertType.GetString(); + } + + if (item.AlertType.ValueKind == JsonValueKind.Array) + { + foreach (var element in item.AlertType.EnumerateArray()) + { + if (element.ValueKind == JsonValueKind.Number) + { + var id = element.GetInt32(); + if (taxonomy.TryGetValue(id, out var label)) + { + return label; + } + } + else if (element.ValueKind == JsonValueKind.String) + { + var label = element.GetString(); + if (!string.IsNullOrWhiteSpace(label)) + { + return label; + } + } + } + } + + return null; + } + + private static Dictionary TrimKnownHashes(Dictionary hashes, int maxEntries) + { + if (hashes.Count <= maxEntries) + { + return hashes; + } + + var overflow = hashes.Count - maxEntries; + foreach (var key in hashes.Keys.Take(overflow).ToList()) + { + hashes.Remove(key); + } + + return hashes; + } + + private static DateTimeOffset? ParseDate(string? value) + => string.IsNullOrWhiteSpace(value) + ? null + : DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed) + ? 
parsed + : null; + + private static string ComputeSha256(byte[] payload) + => Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant(); +} diff --git a/src/StellaOps.Feedser.Source.Cccs/CccsConnectorPlugin.cs b/src/StellaOps.Feedser.Source.Cccs/CccsConnectorPlugin.cs new file mode 100644 index 00000000..2bba9dc4 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/CccsConnectorPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Cccs; + +public sealed class CccsConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "cccs"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) + => services.GetService<CccsConnector>() is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return services.GetRequiredService<CccsConnector>(); + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/CccsDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.Cccs/CccsDependencyInjectionRoutine.cs new file mode 100644 index 00000000..a6812e0f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/CccsDependencyInjectionRoutine.cs @@ -0,0 +1,50 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; +using StellaOps.Feedser.Source.Cccs.Configuration; + +namespace StellaOps.Feedser.Source.Cccs; + +public sealed class CccsDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:cccs"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddCccsConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient<CccsFetchJob>(); + + services.PostConfigure<JobSchedulerOptions>(options => + { + EnsureJob(options, CccsJobKinds.Fetch, typeof(CccsFetchJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/CccsServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Cccs/CccsServiceCollectionExtensions.cs new file mode 100644 index 00000000..adb3c7d3 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/CccsServiceCollectionExtensions.cs @@ -0,0 +1,47 @@ +using System; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Cccs.Configuration; +using StellaOps.Feedser.Source.Cccs.Internal; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Common.Html; + +namespace StellaOps.Feedser.Source.Cccs; + +public static class CccsServiceCollectionExtensions +{ + public static IServiceCollection AddCccsConnector(this IServiceCollection services, Action<CccsOptions> configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + services.AddOptions<CccsOptions>() + .Configure(configure)
.PostConfigure(static options => options.Validate()); + + services.AddSourceHttpClient(CccsOptions.HttpClientName, static (sp, clientOptions) => + { + var options = sp.GetRequiredService>().Value; + clientOptions.UserAgent = "StellaOps.Feedser.Cccs/1.0"; + clientOptions.Timeout = options.RequestTimeout; + clientOptions.AllowedHosts.Clear(); + + foreach (var feed in options.Feeds.Where(static feed => feed.Uri is not null)) + { + clientOptions.AllowedHosts.Add(feed.Uri!.Host); + } + + clientOptions.AllowedHosts.Add("www.cyber.gc.ca"); + clientOptions.AllowedHosts.Add("cyber.gc.ca"); + }); + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.AddTransient(); + return services; + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Class1.cs b/src/StellaOps.Feedser.Source.Cccs/Class1.cs deleted file mode 100644 index 220d4c88..00000000 --- a/src/StellaOps.Feedser.Source.Cccs/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.Cccs; - -public sealed class CccsConnectorPlugin : IConnectorPlugin -{ - public string Name => "cccs"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.Cccs/Configuration/CccsOptions.cs b/src/StellaOps.Feedser.Source.Cccs/Configuration/CccsOptions.cs new file mode 100644 index 00000000..16c2fa8f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Configuration/CccsOptions.cs @@ -0,0 +1,175 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.Cccs.Configuration; + +public sealed class CccsOptions +{ + public const string HttpClientName = "feedser.source.cccs"; + + private readonly List _feeds = new(); + + public CccsOptions() + { + _feeds.Add(new CccsFeedEndpoint("en", new Uri("https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat"))); + _feeds.Add(new CccsFeedEndpoint("fr", new Uri("https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat"))); + } + + /// + /// Feed endpoints to poll; configure per language or content category. + /// + public IList Feeds => _feeds; + + /// + /// Maximum number of entries to enqueue per fetch cycle. + /// + public int MaxEntriesPerFetch { get; set; } = 80; + + /// + /// Maximum remembered entries (URI+hash) for deduplication. + /// + public int MaxKnownEntries { get; set; } = 512; + + /// + /// Timeout applied to feed and taxonomy requests. + /// + public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Delay between successive feed requests to respect upstream throttling. + /// + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + /// + /// Backoff recorded in source state when fetch fails. 
+ /// + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(1); + + public void Validate() + { + if (_feeds.Count == 0) + { + throw new InvalidOperationException("At least one CCCS feed endpoint must be configured."); + } + + var seenLanguages = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var feed in _feeds) + { + feed.Validate(); + if (!seenLanguages.Add(feed.Language)) + { + throw new InvalidOperationException($"Duplicate CCCS feed language configured: '{feed.Language}'. Each language should be unique to avoid duplicate ingestion."); + } + } + + if (MaxEntriesPerFetch <= 0) + { + throw new InvalidOperationException($"{nameof(MaxEntriesPerFetch)} must be greater than zero."); + } + + if (MaxKnownEntries <= 0) + { + throw new InvalidOperationException($"{nameof(MaxKnownEntries)} must be greater than zero."); + } + + if (RequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(RequestTimeout)} must be positive."); + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(RequestDelay)} cannot be negative."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(FailureBackoff)} must be positive."); + } + } +} + +public sealed class CccsFeedEndpoint +{ + public CccsFeedEndpoint() + { + } + + public CccsFeedEndpoint(string language, Uri uri) + { + Language = language; + Uri = uri; + } + + public string Language { get; set; } = "en"; + + public Uri? Uri { get; set; } + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Language)) + { + throw new InvalidOperationException("CCCS feed language must be specified."); + } + + if (Uri is null || !Uri.IsAbsoluteUri) + { + throw new InvalidOperationException($"CCCS feed endpoint URI must be an absolute URI (language='{Language}')."); + } + } + + public Uri BuildTaxonomyUri() + { + if (Uri is null) + { + throw new InvalidOperationException("Feed endpoint URI must be configured before building taxonomy URI."); + } + + var language = Uri.GetQueryParameterValueOrDefault("lang", Language); + var builder = $"https://www.cyber.gc.ca/api/cccs/taxonomy/v1/get?lang={language}&vocabulary=cccs_alert_type"; + return new Uri(builder, UriKind.Absolute); + } +} + +internal static class CccsUriExtensions +{ + public static string GetQueryParameterValueOrDefault(this Uri uri, string key, string fallback) + { + if (uri is null) + { + return fallback; + } + + var query = uri.Query; + if (string.IsNullOrEmpty(query)) + { + return fallback; + } + + var trimmed = query.StartsWith("?", StringComparison.Ordinal) ? query[1..] 
: query; + foreach (var pair in trimmed.Split(new[] { '&' }, StringSplitOptions.RemoveEmptyEntries)) + { + var separatorIndex = pair.IndexOf('='); + if (separatorIndex < 0) + { + continue; + } + + var left = pair[..separatorIndex].Trim(); + if (!left.Equals(key, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var right = pair[(separatorIndex + 1)..].Trim(); + if (right.Length == 0) + { + continue; + } + + return Uri.UnescapeDataString(right); + } + + return fallback; + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsAdvisoryDto.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsAdvisoryDto.cs new file mode 100644 index 00000000..2f31e3aa --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsAdvisoryDto.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +internal sealed record CccsAdvisoryDto +{ + [JsonPropertyName("sourceId")] + public string SourceId { get; init; } = string.Empty; + + [JsonPropertyName("serialNumber")] + public string SerialNumber { get; init; } = string.Empty; + + [JsonPropertyName("language")] + public string Language { get; init; } = "en"; + + [JsonPropertyName("title")] + public string Title { get; init; } = string.Empty; + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("canonicalUrl")] + public string CanonicalUrl { get; init; } = string.Empty; + + [JsonPropertyName("contentHtml")] + public string ContentHtml { get; init; } = string.Empty; + + [JsonPropertyName("published")] + public DateTimeOffset? Published { get; init; } + + [JsonPropertyName("modified")] + public DateTimeOffset? Modified { get; init; } + + [JsonPropertyName("alertType")] + public string? AlertType { get; init; } + + [JsonPropertyName("subject")] + public string? Subject { get; init; } + + [JsonPropertyName("products")] + public IReadOnlyList<string> Products { get; init; } = Array.Empty<string>(); + + [JsonPropertyName("references")] + public IReadOnlyList<CccsReferenceDto> References { get; init; } = Array.Empty<CccsReferenceDto>(); + + [JsonPropertyName("cveIds")] + public IReadOnlyList<string> CveIds { get; init; } = Array.Empty<string>(); +} + +internal sealed record CccsReferenceDto( + [property: JsonPropertyName("url")] string Url, + [property: JsonPropertyName("label")] string? Label); diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsCursor.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsCursor.cs new file mode 100644 index 00000000..7e50faac --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsCursor.cs @@ -0,0 +1,145 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +internal sealed record CccsCursor( + IReadOnlyCollection<Guid> PendingDocuments, + IReadOnlyCollection<Guid> PendingMappings, + IReadOnlyDictionary<string, string> KnownEntryHashes, + DateTimeOffset? LastFetchAt) +{ + private static readonly IReadOnlyCollection<Guid> EmptyGuidCollection = Array.Empty<Guid>(); + private static readonly IReadOnlyDictionary<string, string> EmptyHashes = new Dictionary<string, string>(StringComparer.Ordinal); + + public static CccsCursor Empty { get; } = new(EmptyGuidCollection, EmptyGuidCollection, EmptyHashes, null); + + public CccsCursor WithPendingDocuments(IEnumerable<Guid> documents) + { + var distinct = (documents ??
Enumerable.Empty<Guid>()).Distinct().ToArray(); + return this with { PendingDocuments = distinct }; + } + + public CccsCursor WithPendingMappings(IEnumerable<Guid> mappings) + { + var distinct = (mappings ?? Enumerable.Empty<Guid>()).Distinct().ToArray(); + return this with { PendingMappings = distinct }; + } + + public CccsCursor WithKnownEntryHashes(IReadOnlyDictionary<string, string> hashes) + { + var map = hashes is null || hashes.Count == 0 + ? EmptyHashes + : new Dictionary<string, string>(hashes, StringComparer.Ordinal); + return this with { KnownEntryHashes = map }; + } + + public CccsCursor WithLastFetch(DateTimeOffset? timestamp) + => this with { LastFetchAt = timestamp }; + + public BsonDocument ToBsonDocument() + { + var doc = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())), + }; + + if (KnownEntryHashes.Count > 0) + { + var hashes = new BsonArray(); + foreach (var kvp in KnownEntryHashes) + { + hashes.Add(new BsonDocument + { + ["uri"] = kvp.Key, + ["hash"] = kvp.Value, + }); + } + + doc["knownEntryHashes"] = hashes; + } + + if (LastFetchAt.HasValue) + { + doc["lastFetchAt"] = LastFetchAt.Value.UtcDateTime; + } + + return doc; + } + + public static CccsCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + var pendingDocuments = ReadGuidArray(document, "pendingDocuments"); + var pendingMappings = ReadGuidArray(document, "pendingMappings"); + var hashes = ReadHashMap(document); + var lastFetch = document.TryGetValue("lastFetchAt", out var value) + ? ParseDateTime(value) + : null; + + return new CccsCursor(pendingDocuments, pendingMappings, hashes, lastFetch); + } + + private static IReadOnlyCollection<Guid> ReadGuidArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyGuidCollection; + } + + var items = new List<Guid>(array.Count); + foreach (var element in array) + { + if (Guid.TryParse(element?.ToString(), out var guid)) + { + items.Add(guid); + } + } + + return items; + } + + private static IReadOnlyDictionary<string, string> ReadHashMap(BsonDocument document) + { + if (!document.TryGetValue("knownEntryHashes", out var value) || value is not BsonArray array || array.Count == 0) + { + return EmptyHashes; + } + + var map = new Dictionary<string, string>(array.Count, StringComparer.Ordinal); + foreach (var element in array) + { + if (element is not BsonDocument entry) + { + continue; + } + + if (!entry.TryGetValue("uri", out var uriValue) || uriValue.IsBsonNull || string.IsNullOrWhiteSpace(uriValue.AsString)) + { + continue; + } + + var hash = entry.TryGetValue("hash", out var hashValue) && !hashValue.IsBsonNull + ? hashValue.AsString + : string.Empty; + map[uriValue.AsString] = hash; + } + + return map; + } + + private static DateTimeOffset?
ParseDateTime(BsonValue value) + => value.BsonType switch + { + BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc), + BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => null, + }; +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsDiagnostics.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsDiagnostics.cs new file mode 100644 index 00000000..0dc1ae4f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsDiagnostics.cs @@ -0,0 +1,58 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +public sealed class CccsDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.Cccs"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + private readonly Counter _fetchAttempts; + private readonly Counter _fetchSuccess; + private readonly Counter _fetchDocuments; + private readonly Counter _fetchUnchanged; + private readonly Counter _fetchFailures; + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Counter _parseQuarantine; + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + + public CccsDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + _fetchAttempts = _meter.CreateCounter("cccs.fetch.attempts", unit: "operations"); + _fetchSuccess = _meter.CreateCounter("cccs.fetch.success", unit: "operations"); + _fetchDocuments = _meter.CreateCounter("cccs.fetch.documents", unit: "documents"); + _fetchUnchanged = _meter.CreateCounter("cccs.fetch.unchanged", unit: "documents"); + _fetchFailures = _meter.CreateCounter("cccs.fetch.failures", unit: "operations"); + _parseSuccess = _meter.CreateCounter("cccs.parse.success", unit: "documents"); + _parseFailures = _meter.CreateCounter("cccs.parse.failures", unit: "documents"); + _parseQuarantine = _meter.CreateCounter("cccs.parse.quarantine", unit: "documents"); + _mapSuccess = _meter.CreateCounter("cccs.map.success", unit: "advisories"); + _mapFailures = _meter.CreateCounter("cccs.map.failures", unit: "advisories"); + } + + public void FetchAttempt() => _fetchAttempts.Add(1); + + public void FetchSuccess() => _fetchSuccess.Add(1); + + public void FetchDocument() => _fetchDocuments.Add(1); + + public void FetchUnchanged() => _fetchUnchanged.Add(1); + + public void FetchFailure() => _fetchFailures.Add(1); + + public void ParseSuccess() => _parseSuccess.Add(1); + + public void ParseFailure() => _parseFailures.Add(1); + + public void ParseQuarantine() => _parseQuarantine.Add(1); + + public void MapSuccess() => _mapSuccess.Add(1); + + public void MapFailure() => _mapFailures.Add(1); + + public void Dispose() => _meter.Dispose(); +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedClient.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedClient.cs new file mode 100644 index 00000000..b6914cc6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedClient.cs @@ -0,0 +1,146 @@ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Feedser.Source.Cccs.Configuration; +using StellaOps.Feedser.Source.Common.Fetch; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +public sealed class CccsFeedClient +{ + private static readonly 
string[] AcceptHeaders = + { + "application/json", + "application/vnd.api+json;q=0.9", + "text/json;q=0.8", + "application/*+json;q=0.7", + }; + + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private readonly SourceFetchService _fetchService; + private readonly ILogger _logger; + + public CccsFeedClient(SourceFetchService fetchService, ILogger logger) + { + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + internal async Task FetchAsync(CccsFeedEndpoint endpoint, TimeSpan requestTimeout, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(endpoint); + if (endpoint.Uri is null) + { + throw new InvalidOperationException("Feed endpoint URI must be configured."); + } + + var request = new SourceFetchRequest(CccsOptions.HttpClientName, CccsConnectorPlugin.SourceName, endpoint.Uri) + { + AcceptHeaders = AcceptHeaders, + TimeoutOverride = requestTimeout, + Metadata = new Dictionary(StringComparer.Ordinal) + { + ["cccs.language"] = endpoint.Language, + ["cccs.feedUri"] = endpoint.Uri.ToString(), + }, + }; + + try + { + var result = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + + if (!result.IsSuccess || result.Content is null) + { + _logger.LogWarning("CCCS feed fetch returned no content for {Uri} (status={Status})", endpoint.Uri, result.StatusCode); + return CccsFeedResult.Empty; + } + + var feedResponse = Deserialize(result.Content); + if (feedResponse is null || feedResponse.Error) + { + _logger.LogWarning("CCCS feed response flagged an error for {Uri}", endpoint.Uri); + return CccsFeedResult.Empty; + } + + var taxonomy = await FetchTaxonomyAsync(endpoint, requestTimeout, cancellationToken).ConfigureAwait(false); + var items = (IReadOnlyList)feedResponse.Response ?? 
Array.Empty(); + return new CccsFeedResult(items, taxonomy, result.LastModified); + } + catch (Exception ex) when (ex is JsonException or InvalidOperationException) + { + _logger.LogError(ex, "CCCS feed deserialization failed for {Uri}", endpoint.Uri); + throw; + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + _logger.LogWarning(ex, "CCCS feed fetch failed for {Uri}", endpoint.Uri); + throw; + } + } + + private async Task> FetchTaxonomyAsync(CccsFeedEndpoint endpoint, TimeSpan timeout, CancellationToken cancellationToken) + { + var taxonomyUri = endpoint.BuildTaxonomyUri(); + var request = new SourceFetchRequest(CccsOptions.HttpClientName, CccsConnectorPlugin.SourceName, taxonomyUri) + { + AcceptHeaders = AcceptHeaders, + TimeoutOverride = timeout, + Metadata = new Dictionary(StringComparer.Ordinal) + { + ["cccs.language"] = endpoint.Language, + ["cccs.taxonomyUri"] = taxonomyUri.ToString(), + }, + }; + + try + { + var result = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess || result.Content is null) + { + _logger.LogDebug("CCCS taxonomy fetch returned no content for {Uri}", taxonomyUri); + return new Dictionary(0); + } + + var taxonomyResponse = Deserialize(result.Content); + if (taxonomyResponse is null || taxonomyResponse.Error) + { + _logger.LogDebug("CCCS taxonomy response indicated error for {Uri}", taxonomyUri); + return new Dictionary(0); + } + + var map = new Dictionary(taxonomyResponse.Response.Count); + foreach (var item in taxonomyResponse.Response) + { + if (!string.IsNullOrWhiteSpace(item.Title)) + { + map[item.Id] = item.Title!; + } + } + + return map; + } + catch (Exception ex) when (ex is JsonException or InvalidOperationException) + { + _logger.LogWarning(ex, "Failed to deserialize CCCS taxonomy for {Uri}", taxonomyUri); + return new Dictionary(0); + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + _logger.LogWarning(ex, "CCCS taxonomy fetch failed for {Uri}", taxonomyUri); + return new Dictionary(0); + } + } + + private static T? Deserialize(byte[] content) + => JsonSerializer.Deserialize(content, SerializerOptions); +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedModels.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedModels.cs new file mode 100644 index 00000000..b3c44711 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsFeedModels.cs @@ -0,0 +1,101 @@ +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +internal sealed class CccsFeedResponse +{ + [JsonPropertyName("ERROR")] + public bool Error { get; init; } + + [JsonPropertyName("response")] + public List Response { get; init; } = new(); +} + +internal sealed class CccsFeedItem +{ + [JsonPropertyName("nid")] + public int Nid { get; init; } + + [JsonPropertyName("title")] + public string? Title { get; init; } + + [JsonPropertyName("uuid")] + public string? Uuid { get; init; } + + [JsonPropertyName("banner")] + public string? Banner { get; init; } + + [JsonPropertyName("lang")] + public string? Language { get; init; } + + [JsonPropertyName("date_modified")] + public string? DateModified { get; init; } + + [JsonPropertyName("date_modified_ts")] + public string? DateModifiedTimestamp { get; init; } + + [JsonPropertyName("date_created")] + public string? 
DateCreated { get; init; } + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("body")] + public string[] Body { get; init; } = Array.Empty(); + + [JsonPropertyName("url")] + public string? Url { get; init; } + + [JsonPropertyName("alert_type")] + public JsonElement AlertType { get; init; } + + [JsonPropertyName("serial_number")] + public string? SerialNumber { get; init; } + + [JsonPropertyName("subject")] + public string? Subject { get; init; } + + [JsonPropertyName("moderation_state")] + public string? ModerationState { get; init; } + + [JsonPropertyName("external_url")] + public string? ExternalUrl { get; init; } +} + +internal sealed class CccsTaxonomyResponse +{ + [JsonPropertyName("ERROR")] + public bool Error { get; init; } + + [JsonPropertyName("response")] + public List Response { get; init; } = new(); +} + +internal sealed class CccsTaxonomyItem +{ + [JsonPropertyName("id")] + public int Id { get; init; } + + [JsonPropertyName("title")] + public string? Title { get; init; } +} + +internal sealed record CccsFeedResult( + IReadOnlyList Items, + IReadOnlyDictionary AlertTypes, + DateTimeOffset? LastModifiedUtc) +{ + public static CccsFeedResult Empty { get; } = new( + Array.Empty(), + new Dictionary(0), + null); +} + +internal static class CccsFeedResultExtensions +{ + public static CccsFeedResult ToResult(this IReadOnlyList items, DateTimeOffset? lastModified, IReadOnlyDictionary alertTypes) + => new(items, alertTypes, lastModified); +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsHtmlParser.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsHtmlParser.cs new file mode 100644 index 00000000..6b7236be --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsHtmlParser.cs @@ -0,0 +1,449 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text.RegularExpressions; +using AngleSharp.Dom; +using AngleSharp.Html.Dom; +using AngleSharp.Html.Parser; +using StellaOps.Feedser.Source.Common.Html; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +public sealed class CccsHtmlParser +{ + private static readonly Regex SerialRegex = new(@"(?:(Number|Num[eé]ro)\s*[::]\s*)(?[A-Z0-9\-\/]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex DateRegex = new(@"(?:(Date|Date de publication)\s*[::]\s*)(?[A-Za-zÀ-ÿ0-9,\.\s\-]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex CveRegex = new(@"CVE-\d{4}-\d{4,}", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex CollapseWhitespaceRegex = new(@"\s+", RegexOptions.Compiled); + + private static readonly CultureInfo[] EnglishCultures = + { + CultureInfo.GetCultureInfo("en-CA"), + CultureInfo.GetCultureInfo("en-US"), + CultureInfo.InvariantCulture, + }; + + private static readonly CultureInfo[] FrenchCultures = + { + CultureInfo.GetCultureInfo("fr-CA"), + CultureInfo.GetCultureInfo("fr-FR"), + CultureInfo.InvariantCulture, + }; + + private static readonly string[] ProductHeadingKeywords = + { + "affected", + "produit", + "produits", + "produits touch", + "produits concern", + "mesures recommand", + }; + + private static readonly string[] TrackingParameterPrefixes = + { + "utm_", + "mc_", + "mkt_", + "elq", + }; + + private readonly HtmlContentSanitizer _sanitizer; + private readonly HtmlParser _parser; + + public CccsHtmlParser(HtmlContentSanitizer sanitizer) + { + _sanitizer = sanitizer ?? 
throw new ArgumentNullException(nameof(sanitizer)); + _parser = new HtmlParser(new HtmlParserOptions + { + IsScripting = false, + IsKeepingSourceReferences = false, + }); + } + + internal CccsAdvisoryDto Parse(CccsRawAdvisoryDocument raw) + { + ArgumentNullException.ThrowIfNull(raw); + + var baseUri = TryCreateUri(raw.CanonicalUrl); + var document = _parser.ParseDocument(raw.BodyHtml ?? string.Empty); + var body = document.Body ?? document.DocumentElement; + var sanitized = _sanitizer.Sanitize(body?.InnerHtml ?? raw.BodyHtml ?? string.Empty, baseUri); + var contentRoot = body ?? document.DocumentElement; + + var serialNumber = !string.IsNullOrWhiteSpace(raw.SerialNumber) + ? raw.SerialNumber!.Trim() + : ExtractSerialNumber(document) ?? raw.SourceId; + + var published = raw.Published ?? ExtractDate(document, raw.Language) ?? raw.Modified; + var references = ExtractReferences(contentRoot, baseUri, raw.Language); + var products = ExtractProducts(contentRoot); + var cveIds = ExtractCveIds(document); + + return new CccsAdvisoryDto + { + SourceId = raw.SourceId, + SerialNumber = serialNumber, + Language = raw.Language, + Title = raw.Title, + Summary = CollapseWhitespace(raw.Summary), + CanonicalUrl = raw.CanonicalUrl, + ContentHtml = sanitized, + Published = published, + Modified = raw.Modified ?? published, + AlertType = raw.AlertType, + Subject = raw.Subject, + Products = products, + References = references, + CveIds = cveIds, + }; + } + + private static Uri? TryCreateUri(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return Uri.TryCreate(value, UriKind.Absolute, out var absolute) ? absolute : null; + } + + private static string? ExtractSerialNumber(IDocument document) + { + if (document.Body is null) + { + return null; + } + + foreach (var element in document.QuerySelectorAll("strong, p, div")) + { + var text = element.TextContent; + if (string.IsNullOrWhiteSpace(text)) + { + continue; + } + + var match = SerialRegex.Match(text); + if (match.Success && match.Groups["id"].Success) + { + var value = match.Groups["id"].Value.Trim(); + if (!string.IsNullOrWhiteSpace(value)) + { + return value; + } + } + } + + var bodyText = document.Body.TextContent; + var fallback = SerialRegex.Match(bodyText ?? string.Empty); + return fallback.Success && fallback.Groups["id"].Success + ? fallback.Groups["id"].Value.Trim() + : null; + } + + private static DateTimeOffset? ExtractDate(IDocument document, string language) + { + if (document.Body is null) + { + return null; + } + + var textSegments = new List(); + foreach (var element in document.QuerySelectorAll("strong, p, div")) + { + var text = element.TextContent; + if (string.IsNullOrWhiteSpace(text)) + { + continue; + } + + var match = DateRegex.Match(text); + if (match.Success && match.Groups["date"].Success) + { + textSegments.Add(match.Groups["date"].Value.Trim()); + } + } + + if (textSegments.Count == 0 && !string.IsNullOrWhiteSpace(document.Body.TextContent)) + { + textSegments.Add(document.Body.TextContent); + } + + var cultures = language.StartsWith("fr", StringComparison.OrdinalIgnoreCase) ? FrenchCultures : EnglishCultures; + + foreach (var segment in textSegments) + { + foreach (var culture in cultures) + { + if (DateTime.TryParse(segment, culture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) + { + return new DateTimeOffset(parsed.ToUniversalTime()); + } + } + } + + return null; + } + + private static IReadOnlyList ExtractProducts(IElement? 
root) + { + if (root is null) + { + return Array.Empty(); + } + + var results = new List(); + + foreach (var heading in root.QuerySelectorAll("h1,h2,h3,h4,h5,h6")) + { + var text = heading.TextContent?.Trim(); + if (!IsProductHeading(text)) + { + continue; + } + + var sibling = heading.NextElementSibling; + while (sibling is not null) + { + if (IsHeading(sibling)) + { + break; + } + + if (IsListElement(sibling)) + { + AppendListItems(sibling, results); + if (results.Count > 0) + { + break; + } + } + else if (IsContentContainer(sibling)) + { + foreach (var list in sibling.QuerySelectorAll("ul,ol")) + { + AppendListItems(list, results); + } + + if (results.Count > 0) + { + break; + } + } + + sibling = sibling.NextElementSibling; + } + + if (results.Count > 0) + { + break; + } + } + + if (results.Count == 0) + { + foreach (var li in root.QuerySelectorAll("ul li,ol li")) + { + var itemText = CollapseWhitespace(li.TextContent); + if (!string.IsNullOrWhiteSpace(itemText)) + { + results.Add(itemText); + } + } + } + + return results.Count == 0 + ? Array.Empty() + : results + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static bool IsProductHeading(string? heading) + { + if (string.IsNullOrWhiteSpace(heading)) + { + return false; + } + + var lowered = heading.ToLowerInvariant(); + return ProductHeadingKeywords.Any(keyword => lowered.Contains(keyword, StringComparison.OrdinalIgnoreCase)); + } + + private static bool IsHeading(IElement element) + => element.LocalName.Length == 2 + && element.LocalName[0] == 'h' + && char.IsDigit(element.LocalName[1]); + + private static bool IsListElement(IElement element) + => string.Equals(element.LocalName, "ul", StringComparison.OrdinalIgnoreCase) + || string.Equals(element.LocalName, "ol", StringComparison.OrdinalIgnoreCase); + + private static bool IsContentContainer(IElement element) + => string.Equals(element.LocalName, "div", StringComparison.OrdinalIgnoreCase) + || string.Equals(element.LocalName, "section", StringComparison.OrdinalIgnoreCase) + || string.Equals(element.LocalName, "article", StringComparison.OrdinalIgnoreCase); + + private static void AppendListItems(IElement listElement, ICollection buffer) + { + foreach (var li in listElement.QuerySelectorAll("li")) + { + if (li is null) + { + continue; + } + + var clone = li.Clone(true) as IElement; + if (clone is null) + { + continue; + } + + foreach (var nested in clone.QuerySelectorAll("ul,ol")) + { + nested.Remove(); + } + + var itemText = CollapseWhitespace(clone.TextContent); + if (!string.IsNullOrWhiteSpace(itemText)) + { + buffer.Add(itemText); + } + } + } + + private static IReadOnlyList ExtractReferences(IElement? root, Uri? baseUri, string language) + { + if (root is null) + { + return Array.Empty(); + } + + var references = new List(); + foreach (var anchor in root.QuerySelectorAll("a[href]")) + { + var href = anchor.GetAttribute("href"); + var normalized = NormalizeReferenceUrl(href, baseUri, language); + if (normalized is null) + { + continue; + } + + var label = CollapseWhitespace(anchor.TextContent); + references.Add(new CccsReferenceDto(normalized, string.IsNullOrWhiteSpace(label) ? null : label)); + } + + return references.Count == 0 + ? 
Array.Empty() + : references + .GroupBy(reference => reference.Url, StringComparer.Ordinal) + .Select(group => group.First()) + .OrderBy(reference => reference.Url, StringComparer.Ordinal) + .ToArray(); + } + + private static string? NormalizeReferenceUrl(string? href, Uri? baseUri, string language) + { + if (string.IsNullOrWhiteSpace(href)) + { + return null; + } + + if (!Uri.TryCreate(href, UriKind.Absolute, out var absolute)) + { + if (baseUri is null || !Uri.TryCreate(baseUri, href, out absolute)) + { + return null; + } + } + + var builder = new UriBuilder(absolute) + { + Fragment = string.Empty, + }; + + var filteredQuery = FilterTrackingParameters(builder.Query, builder.Uri, language); + builder.Query = filteredQuery; + + return builder.Uri.ToString(); + } + + private static string FilterTrackingParameters(string query, Uri uri, string language) + { + if (string.IsNullOrWhiteSpace(query)) + { + return string.Empty; + } + + var trimmed = query.TrimStart('?'); + if (string.IsNullOrWhiteSpace(trimmed)) + { + return string.Empty; + } + + var parameters = trimmed.Split('&', StringSplitOptions.RemoveEmptyEntries); + var kept = new List(); + + foreach (var parameter in parameters) + { + var separatorIndex = parameter.IndexOf('='); + var key = separatorIndex >= 0 ? parameter[..separatorIndex] : parameter; + if (TrackingParameterPrefixes.Any(prefix => key.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))) + { + continue; + } + + if (uri.Host.Contains("cyber.gc.ca", StringComparison.OrdinalIgnoreCase) + && key.Equals("lang", StringComparison.OrdinalIgnoreCase)) + { + kept.Add($"lang={language}"); + continue; + } + + kept.Add(parameter); + } + + if (uri.Host.Contains("cyber.gc.ca", StringComparison.OrdinalIgnoreCase) + && kept.All(parameter => !parameter.StartsWith("lang=", StringComparison.OrdinalIgnoreCase))) + { + kept.Add($"lang={language}"); + } + + return kept.Count == 0 ? string.Empty : string.Join("&", kept); + } + + private static IReadOnlyList ExtractCveIds(IDocument document) + { + if (document.Body is null) + { + return Array.Empty(); + } + + var matches = CveRegex.Matches(document.Body.TextContent ?? string.Empty); + if (matches.Count == 0) + { + return Array.Empty(); + } + + return matches + .Select(match => match.Value.ToUpperInvariant()) + .Distinct(StringComparer.Ordinal) + .OrderBy(value => value, StringComparer.Ordinal) + .ToArray(); + } + + private static string? CollapseWhitespace(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var collapsed = CollapseWhitespaceRegex.Replace(value, " ").Trim(); + return collapsed.Length == 0 ? 
null : collapsed; + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsMapper.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsMapper.cs new file mode 100644 index 00000000..779f841a --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsMapper.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Storage.Mongo.Documents; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +internal static class CccsMapper +{ + public static Advisory Map(CccsAdvisoryDto dto, DocumentRecord document, DateTimeOffset recordedAt) + { + ArgumentNullException.ThrowIfNull(dto); + ArgumentNullException.ThrowIfNull(document); + + var aliases = BuildAliases(dto); + var references = BuildReferences(dto, recordedAt); + var packages = BuildPackages(dto, recordedAt); + var provenance = new[] + { + new AdvisoryProvenance( + CccsConnectorPlugin.SourceName, + "advisory", + dto.AlertType ?? dto.SerialNumber, + recordedAt, + new[] { ProvenanceFieldMasks.Advisory }) + }; + + return new Advisory( + advisoryKey: dto.SerialNumber, + title: dto.Title, + summary: dto.Summary, + language: dto.Language, + published: dto.Published ?? dto.Modified, + modified: dto.Modified ?? dto.Published, + severity: null, + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: packages, + cvssMetrics: Array.Empty(), + provenance: provenance); + } + + private static IReadOnlyList BuildAliases(CccsAdvisoryDto dto) + { + var aliases = new List(capacity: 4) + { + dto.SerialNumber, + }; + + if (!string.IsNullOrWhiteSpace(dto.SourceId) + && !string.Equals(dto.SourceId, dto.SerialNumber, StringComparison.OrdinalIgnoreCase)) + { + aliases.Add(dto.SourceId); + } + + foreach (var cve in dto.CveIds) + { + if (!string.IsNullOrWhiteSpace(cve)) + { + aliases.Add(cve); + } + } + + return aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildReferences(CccsAdvisoryDto dto, DateTimeOffset recordedAt) + { + var references = new List + { + new(dto.CanonicalUrl, "details", "cccs", null, new AdvisoryProvenance( + CccsConnectorPlugin.SourceName, + "reference", + dto.CanonicalUrl, + recordedAt, + new[] { ProvenanceFieldMasks.References })) + }; + + foreach (var reference in dto.References) + { + if (string.IsNullOrWhiteSpace(reference.Url)) + { + continue; + } + + references.Add(new AdvisoryReference( + reference.Url, + "reference", + "cccs", + reference.Label, + new AdvisoryProvenance( + CccsConnectorPlugin.SourceName, + "reference", + reference.Url, + recordedAt, + new[] { ProvenanceFieldMasks.References }))); + } + + return references + .DistinctBy(static reference => reference.Url, StringComparer.Ordinal) + .OrderBy(static reference => reference.Url, StringComparer.Ordinal) + .ToArray(); + } + + private static IReadOnlyList BuildPackages(CccsAdvisoryDto dto, DateTimeOffset recordedAt) + { + if (dto.Products.Count == 0) + { + return Array.Empty(); + } + + var packages = new List(dto.Products.Count); + foreach (var product in dto.Products) + { + if (string.IsNullOrWhiteSpace(product)) + { + continue; + } + + var identifier = product.Trim(); + var provenance = new AdvisoryProvenance( + CccsConnectorPlugin.SourceName, + "package", + identifier, + recordedAt, + new[] { ProvenanceFieldMasks.AffectedPackages }); + + packages.Add(new 
AffectedPackage( + AffectedPackageTypes.Vendor, + identifier, + platform: null, + versionRanges: Array.Empty(), + statuses: Array.Empty(), + provenance: new[] { provenance }, + normalizedVersions: Array.Empty())); + } + + return packages.Count == 0 + ? Array.Empty() + : packages + .DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Internal/CccsRawAdvisoryDocument.cs b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsRawAdvisoryDocument.cs new file mode 100644 index 00000000..c5111d93 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Internal/CccsRawAdvisoryDocument.cs @@ -0,0 +1,58 @@ +using System; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Cccs.Internal; + +internal sealed record CccsRawAdvisoryDocument +{ + [JsonPropertyName("sourceId")] + public string SourceId { get; init; } = string.Empty; + + [JsonPropertyName("serialNumber")] + public string? SerialNumber { get; init; } + + [JsonPropertyName("uuid")] + public string? Uuid { get; init; } + + [JsonPropertyName("language")] + public string Language { get; init; } = "en"; + + [JsonPropertyName("title")] + public string Title { get; init; } = string.Empty; + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("canonicalUrl")] + public string CanonicalUrl { get; init; } = string.Empty; + + [JsonPropertyName("externalUrl")] + public string? ExternalUrl { get; init; } + + [JsonPropertyName("bodyHtml")] + public string BodyHtml { get; init; } = string.Empty; + + [JsonPropertyName("bodySegments")] + public string[] BodySegments { get; init; } = Array.Empty(); + + [JsonPropertyName("alertType")] + public string? AlertType { get; init; } + + [JsonPropertyName("subject")] + public string? Subject { get; init; } + + [JsonPropertyName("banner")] + public string? Banner { get; init; } + + [JsonPropertyName("published")] + public DateTimeOffset? Published { get; init; } + + [JsonPropertyName("modified")] + public DateTimeOffset? Modified { get; init; } + + [JsonPropertyName("rawCreated")] + public string? RawDateCreated { get; init; } + + [JsonPropertyName("rawModified")] + public string? RawDateModified { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Jobs.cs b/src/StellaOps.Feedser.Source.Cccs/Jobs.cs new file mode 100644 index 00000000..8110431d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Jobs.cs @@ -0,0 +1,22 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Cccs; + +internal static class CccsJobKinds +{ + public const string Fetch = "source:cccs:fetch"; +} + +internal sealed class CccsFetchJob : IJob +{ + private readonly CccsConnector _connector; + + public CccsFetchJob(CccsConnector connector) + => _connector = connector ?? 
throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Cccs/Properties/AssemblyInfo.cs b/src/StellaOps.Feedser.Source.Cccs/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..5f71f856 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Cccs/Properties/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Feedser.Source.Cccs.Tests")] diff --git a/src/StellaOps.Feedser.Source.Cccs/StellaOps.Feedser.Source.Cccs.csproj b/src/StellaOps.Feedser.Source.Cccs/StellaOps.Feedser.Source.Cccs.csproj index f7f2c154..04f9158c 100644 --- a/src/StellaOps.Feedser.Source.Cccs/StellaOps.Feedser.Source.Cccs.csproj +++ b/src/StellaOps.Feedser.Source.Cccs/StellaOps.Feedser.Source.Cccs.csproj @@ -6,11 +6,11 @@ enable - - - - - - - - + + + + + + + + diff --git a/src/StellaOps.Feedser.Source.Cccs/TASKS.md b/src/StellaOps.Feedser.Source.Cccs/TASKS.md index 69f20662..36c25954 100644 --- a/src/StellaOps.Feedser.Source.Cccs/TASKS.md +++ b/src/StellaOps.Feedser.Source.Cccs/TASKS.md @@ -2,9 +2,10 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-CCCS-02-001 Catalogue official CCCS advisory feeds|BE-Conn-CCCS|Research|**DONE (2025-10-11)** – Resolved RSS→Atom redirects (`/api/cccs/rss/v1/get?...` → `/api/cccs/atom/v1/get?...`), confirmed feed caps at 50 entries with inline HTML bodies, no `Last-Modified`/`ETag`, and `updated` timestamps in UTC. Findings and packet captures parked in `docs/feedser-connector-research-20251011.md`; retention sweep follow-up tracked in 02-007.| -|FEEDCONN-CCCS-02-002 Implement fetch & source state handling|BE-Conn-CCCS|Source.Common, Storage.Mongo|**TODO** – Register HTTP client with redirect allowance + `User-Agent` override, persist Atom payload plus derived SHA256 to `document` store, and use feed-level `` / entry `` for cursoring. Capture absence of cache headers by stamping synthetic `fetchedAt` TTL and throttling retries to 1/min (Azure App Gateway fronted).| -|FEEDCONN-CCCS-02-003 DTO/parser implementation|BE-Conn-CCCS|Source.Common|**TODO** – Build DTO that keeps original HTML in `ContentHtml` and extracts structured fields: `Serial number`, `Date`, product bullet lists, and reference hyperlinks. Strip tracking query params, collapse whitespace, and normalise French vs English feeds via `lang=` query.| -|FEEDCONN-CCCS-02-004 Canonical mapping & range primitives|BE-Conn-CCCS|Models|**TODO** – Map advisories into canonical records with aliases, references, vendor/package range primitives, and provenance. Align normalized SemVer rules per `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: emit `NormalizedVersions` like `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"cccs:bulletin-id"}]`; include provenance notes to keep storage decision reasons.| -|FEEDCONN-CCCS-02-005 Deterministic fixtures & tests|QA|Testing|**TODO** – Add regression tests with canned fixtures; support `UPDATE_CCCS_FIXTURES=1` to refresh snapshots.| -|FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**TODO** – Document connector configuration, add logging/metrics, and update backlog once feature-complete.| -|FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**TODO** – HTML index exposes `?page=` pagination (`https://www.cyber.gc.ca/en/alerts-advisories?page=4` tested OK). Need to measure depth, record earliest advisory date, and confirm whether separate feeds exist for bulletins/vulnerabilities. Produce backfill plan (HTML scrape → Atom transformation) and language split guidance.| +|FEEDCONN-CCCS-02-002 Implement fetch & source state handling|BE-Conn-CCCS|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – `CccsConnector.FetchAsync` now hydrates feeds via `CccsFeedClient`, persists per-entry JSON payloads with SHA256 dedupe and cursor state, throttles requests, and records taxonomy + language metadata in document state.| +|FEEDCONN-CCCS-02-003 DTO/parser implementation|BE-Conn-CCCS|Source.Common|**DONE (2025-10-14)** – Added `CccsHtmlParser` to sanitize Atom body HTML, extract serial/date/product bullets, collapse whitespace, and emit normalized reference URLs; `ParseAsync` now persists DTO records under schema `cccs.dto.v1`.| +|FEEDCONN-CCCS-02-004 Canonical mapping & range primitives|BE-Conn-CCCS|Models|**DONE (2025-10-14)** – `CccsMapper` now materializes canonical advisories (aliases from serial/source/CVEs, references incl. canonical URL, vendor package records) with provenance masks; `MapAsync` stores results in `AdvisoryStore`.| +|FEEDCONN-CCCS-02-005 Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-14)** – Added English/French fixtures plus parser + connector end-to-end tests (`StellaOps.Feedser.Source.Cccs.Tests`). 
Canned HTTP handler + Mongo fixture enables fetch→parse→map regression; fixtures refresh via `UPDATE_CCCS_FIXTURES=1`.| +|FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**DONE (2025-10-15)** – Added `CccsDiagnostics` meter (fetch/parse/map counters), enriched connector logs with document counts, and published `docs/ops/feedser-cccs-operations.md` covering config, telemetry, and sanitiser guidance.| +|FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**DONE (2025-10-15)** – Measured `/api/cccs/threats/v1/get` inventory (~5.1k rows/lang; earliest 2018-06-08), documented backfill workflow + language split strategy, and linked the runbook for Offline Kit execution.| +|FEEDCONN-CCCS-02-008 Raw DOM parsing refinement|BE-Conn-CCCS|Source.Common|**DONE (2025-10-15)** – Parser now walks unsanitised DOM (heading + nested list coverage), sanitizer keeps ``/`section` nodes, and regression fixtures/tests assert EN/FR list handling + preserved HTML structure.| diff --git a/src/StellaOps.Feedser.Source.CertBund.Tests/CertBundConnectorTests.cs b/src/StellaOps.Feedser.Source.CertBund.Tests/CertBundConnectorTests.cs new file mode 100644 index 00000000..504fe6c9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund.Tests/CertBundConnectorTests.cs @@ -0,0 +1,188 @@ +using System; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.CertBund.Configuration; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Feedser.Testing; +using Xunit; + +namespace StellaOps.Feedser.Source.CertBund.Tests; + +[Collection("mongo-fixture")] +public sealed class CertBundConnectorTests : IAsyncLifetime +{ + private static readonly Uri FeedUri = new("https://test.local/content/public/securityAdvisory/rss"); + private static readonly Uri PortalUri = new("https://test.local/portal/"); + private static readonly Uri DetailUri = new("https://test.local/portal/api/securityadvisory?name=WID-SEC-2025-2264"); + + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler; + + public CertBundConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + _handler = new CannedHttpMessageHandler(); + } + + [Fact] + public async Task FetchParseMap_ProducesCanonicalAdvisory() + { + await using var provider = await BuildServiceProviderAsync(); + SeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(5, CancellationToken.None); + advisories.Should().HaveCount(1); + + var advisory = advisories[0]; + advisory.AdvisoryKey.Should().Be("WID-SEC-2025-2264"); + 
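// Canned responses (SeedResponses) serve the RSS feed plus the WID-SEC-2025-2264 detail JSON, so a single
// fetch -> parse -> map pass should surface the fixture's CVE aliases, Ivanti product entries, reference
// URLs, and German language tag, and leave the pendingDocuments/pendingMappings queues asserted below empty.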
advisory.Aliases.Should().Contain("CVE-2025-1234"); + advisory.AffectedPackages.Should().Contain(package => package.Identifier.Contains("Ivanti")); + advisory.References.Should().Contain(reference => reference.Url == DetailUri.ToString()); + advisory.Language.Should().Be("de"); + + var stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(CertBundConnectorPlugin.SourceName, CancellationToken.None); + state.Should().NotBeNull(); + state!.Cursor.Should().NotBeNull(); + state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue(); + pendingDocs!.AsBsonArray.Should().BeEmpty(); + state.Cursor.TryGetValue("pendingMappings", out var pendingMappings).Should().BeTrue(); + pendingMappings!.AsBsonArray.Should().BeEmpty(); + } + + [Fact] + public async Task Fetch_PersistsDocumentWithMetadata() + { + await using var provider = await BuildServiceProviderAsync(); + SeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + + var documentStore = provider.GetRequiredService(); + var document = await documentStore.FindBySourceAndUriAsync(CertBundConnectorPlugin.SourceName, DetailUri.ToString(), CancellationToken.None); + document.Should().NotBeNull(); + document!.Metadata.Should().ContainKey("certbund.advisoryId").WhoseValue.Should().Be("WID-SEC-2025-2264"); + document.Metadata.Should().ContainKey("certbund.category"); + document.Metadata.Should().ContainKey("certbund.published"); + document.Status.Should().Be(DocumentStatuses.PendingParse); + + var stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(CertBundConnectorPlugin.SourceName, CancellationToken.None); + state.Should().NotBeNull(); + state!.Cursor.Should().NotBeNull(); + state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue(); + pendingDocs!.AsBsonArray.Should().HaveCount(1); + } + + private async Task BuildServiceProviderAsync() + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSourceCommon(); + services.AddCertBundConnector(options => + { + options.FeedUri = FeedUri; + options.PortalBootstrapUri = PortalUri; + options.DetailApiUri = new Uri("https://test.local/portal/api/securityadvisory"); + options.RequestDelay = TimeSpan.Zero; + options.MaxAdvisoriesPerFetch = 10; + options.MaxKnownAdvisories = 32; + }); + + services.Configure(CertBundOptions.HttpClientName, builderOptions => + { + builderOptions.HttpMessageHandlerBuilderActions.Add(builder => + { + builder.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void SeedResponses() + { + AddJsonResponse(DetailUri, ReadFixture("certbund-detail.json")); + AddXmlResponse(FeedUri, ReadFixture("certbund-feed.xml"), "application/rss+xml"); + AddHtmlResponse(PortalUri, "OK"); + } + + private void AddJsonResponse(Uri uri, string 
json, string? etag = null) + { + _handler.AddResponse(uri, () => + { + var response = new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + }; + if (!string.IsNullOrWhiteSpace(etag)) + { + response.Headers.ETag = new EntityTagHeaderValue(etag); + } + + return response; + }); + } + + private void AddXmlResponse(Uri uri, string xml, string contentType) + { + _handler.AddResponse(uri, () => new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(xml, Encoding.UTF8, contentType), + }); + } + + private void AddHtmlResponse(Uri uri, string html) + { + _handler.AddResponse(uri, () => new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(html, Encoding.UTF8, "text/html"), + }); + } + + private static string ReadFixture(string fileName) + => System.IO.File.ReadAllText(System.IO.Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName)); + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; +} diff --git a/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-detail.json b/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-detail.json new file mode 100644 index 00000000..9c545914 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-detail.json @@ -0,0 +1,36 @@ +{ + "name": "WID-SEC-2025-2264", + "title": "Ivanti Endpoint Manager: Mehrere Schwachstellen ermöglichen Codeausführung", + "summary": "Ein entfernter, anonymer Angreifer kann mehrere Schwachstellen in Ivanti Endpoint Manager ausnutzen.", + "description": "
<p>Ivanti Endpoint Manager weist mehrere Schwachstellen auf.</p><p>Ein Angreifer kann beliebigen Code ausführen.</p>
    ", + "severity": "hoch", + "language": "de", + "published": "2025-10-14T06:24:49Z", + "updated": "2025-10-14T07:00:00Z", + "cveIds": [ + "CVE-2025-1234", + "CVE-2025-5678" + ], + "references": [ + { + "url": "https://example.com/vendor/advisory", + "label": "Vendor Advisory" + }, + { + "url": "https://example.com/mitre", + "label": "MITRE" + } + ], + "products": [ + { + "vendor": "Ivanti", + "name": "Endpoint Manager", + "versions": "2023.1 bis 2024.2" + }, + { + "vendor": "Ivanti", + "name": "Endpoint Manager Cloud", + "versions": "alle" + } + ] +} diff --git a/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-feed.xml b/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-feed.xml new file mode 100644 index 00000000..67f3ff2d --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund.Tests/Fixtures/certbund-feed.xml @@ -0,0 +1,15 @@ + + + + BSI Warn- und Informationsdienst + https://wid.cert-bund.de/portal/wid/securityadvisory + Test feed + Tue, 14 Oct 2025 07:06:21 GMT + + [hoch] Ivanti Endpoint Manager: Mehrere Schwachstellen ermöglichen Codeausführung + https://wid.cert-bund.de/portal/wid/securityadvisory?name=WID-SEC-2025-2264 + hoch + Tue, 14 Oct 2025 06:24:49 GMT + + + diff --git a/src/StellaOps.Feedser.Source.CertBund.Tests/StellaOps.Feedser.Source.CertBund.Tests.csproj b/src/StellaOps.Feedser.Source.CertBund.Tests/StellaOps.Feedser.Source.CertBund.Tests.csproj new file mode 100644 index 00000000..d31cb11a --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund.Tests/StellaOps.Feedser.Source.CertBund.Tests.csproj @@ -0,0 +1,22 @@ + + + net10.0 + enable + enable + + + + + + + + + + + PreserveNewest + + + PreserveNewest + + + diff --git a/src/StellaOps.Feedser.Source.CertBund/CertBundConnector.cs b/src/StellaOps.Feedser.Source.CertBund/CertBundConnector.cs new file mode 100644 index 00000000..e56b6623 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/CertBundConnector.cs @@ -0,0 +1,435 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.CertBund.Configuration; +using StellaOps.Feedser.Source.CertBund.Internal; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Common.Html; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.CertBund; + +public sealed class CertBundConnector : IFeedConnector +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, + }; + + private readonly CertBundFeedClient _feedClient; + private readonly CertBundDetailParser _detailParser; + private readonly SourceFetchService _fetchService; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly CertBundOptions _options; + private readonly TimeProvider _timeProvider; + private readonly 
CertBundDiagnostics _diagnostics; + private readonly ILogger _logger; + + public CertBundConnector( + CertBundFeedClient feedClient, + CertBundDetailParser detailParser, + SourceFetchService fetchService, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + IOptions options, + CertBundDiagnostics diagnostics, + TimeProvider? timeProvider, + ILogger logger) + { + _feedClient = feedClient ?? throw new ArgumentNullException(nameof(feedClient)); + _detailParser = detailParser ?? throw new ArgumentNullException(nameof(detailParser)); + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => CertBundConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var now = _timeProvider.GetUtcNow(); + IReadOnlyList feedItems; + + _diagnostics.FeedFetchAttempt(); + try + { + feedItems = await _feedClient.LoadAsync(cancellationToken).ConfigureAwait(false); + _diagnostics.FeedFetchSuccess(feedItems.Count); + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund feed fetch failed"); + _diagnostics.FeedFetchFailure(); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + var coverageDays = CalculateCoverageDays(feedItems, now); + _diagnostics.RecordFeedCoverage(coverageDays); + + if (feedItems.Count == 0) + { + await UpdateCursorAsync(cursor.WithLastFetch(now), cancellationToken).ConfigureAwait(false); + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var knownAdvisories = new HashSet(cursor.KnownAdvisories, StringComparer.OrdinalIgnoreCase); + var processed = 0; + var alreadyKnown = 0; + var notModified = 0; + var detailFailures = 0; + var truncated = false; + var latestPublished = cursor.LastPublished ?? 
DateTimeOffset.MinValue; + + foreach (var item in feedItems.OrderByDescending(static i => i.Published)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (knownAdvisories.Contains(item.AdvisoryId)) + { + alreadyKnown++; + continue; + } + + if (processed >= _options.MaxAdvisoriesPerFetch) + { + truncated = true; + break; + } + + try + { + _diagnostics.DetailFetchAttempt(); + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, item.DetailUri.ToString(), cancellationToken).ConfigureAwait(false); + var request = new SourceFetchRequest(CertBundOptions.HttpClientName, SourceName, item.DetailUri) + { + AcceptHeaders = new[] { "application/json", "text/json" }, + Metadata = CertBundDocumentMetadata.CreateMetadata(item), + ETag = existing?.Etag, + LastModified = existing?.LastModified, + TimeoutOverride = _options.RequestTimeout, + }; + + var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false); + if (result.IsNotModified) + { + _diagnostics.DetailFetchNotModified(); + notModified++; + knownAdvisories.Add(item.AdvisoryId); + continue; + } + + if (!result.IsSuccess || result.Document is null) + { + _diagnostics.DetailFetchFailure("skipped"); + detailFailures++; + continue; + } + + _diagnostics.DetailFetchSuccess(); + pendingDocuments.Add(result.Document.Id); + pendingMappings.Remove(result.Document.Id); + knownAdvisories.Add(item.AdvisoryId); + processed++; + + if (_options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund detail fetch failed for {AdvisoryId}", item.AdvisoryId); + _diagnostics.DetailFetchFailure("exception"); + detailFailures++; + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + if (item.Published > latestPublished) + { + latestPublished = item.Published; + } + } + + _diagnostics.DetailFetchEnqueued(processed); + + if (feedItems.Count > 0 || processed > 0 || detailFailures > 0) + { + _logger.LogInformation( + "CERT-Bund fetch cycle: feed items {FeedItems}, enqueued {Enqueued}, already known {Known}, not modified {NotModified}, detail failures {DetailFailures}, pending documents {PendingDocuments}, pending mappings {PendingMappings}, truncated {Truncated}, coverageDays={CoverageDays}", + feedItems.Count, + processed, + alreadyKnown, + notModified, + detailFailures, + pendingDocuments.Count, + pendingMappings.Count, + truncated, + coverageDays ?? double.NaN); + } + + var trimmedKnown = knownAdvisories.Count > _options.MaxKnownAdvisories + ? knownAdvisories.OrderByDescending(id => id, StringComparer.OrdinalIgnoreCase) + .Take(_options.MaxKnownAdvisories) + .ToArray() + : knownAdvisories.ToArray(); + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithKnownAdvisories(trimmedKnown) + .WithLastPublished(latestPublished == DateTimeOffset.MinValue ? 
cursor.LastPublished : latestPublished) + .WithLastFetch(now); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var remainingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var now = _timeProvider.GetUtcNow(); + var parsedCount = 0; + var failedCount = 0; + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + if (!document.GridFsId.HasValue) + { + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure("missing_payload"); + failedCount++; + continue; + } + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund unable to download document {DocumentId}", document.Id); + _diagnostics.ParseFailure("download_failed"); + throw; + } + + CertBundAdvisoryDto dto; + try + { + dto = _detailParser.Parse(new Uri(document.Uri), new Uri(document.Metadata?["certbund.portalUri"] ?? 
document.Uri), payload); + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund failed to parse advisory detail {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure("parse_error"); + failedCount++; + continue; + } + + _diagnostics.ParseSuccess(dto.Products.Count, dto.CveIds.Count); + parsedCount++; + + var bson = BsonDocument.Parse(JsonSerializer.Serialize(dto, SerializerOptions)); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "cert-bund.detail.v1", bson, now); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + + remainingDocuments.Remove(documentId); + pendingMappings.Add(document.Id); + } + + if (cursor.PendingDocuments.Count > 0) + { + _logger.LogInformation( + "CERT-Bund parse cycle: parsed {Parsed}, failures {Failures}, remaining documents {RemainingDocuments}, pending mappings {PendingMappings}", + parsedCount, + failedCount, + remainingDocuments.Count, + pendingMappings.Count); + } + + var updatedCursor = cursor + .WithPendingDocuments(remainingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var mappedCount = 0; + var failedCount = 0; + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingMappings.Remove(documentId); + continue; + } + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapFailure("missing_dto"); + failedCount++; + continue; + } + + CertBundAdvisoryDto? 
dto; + try + { + dto = JsonSerializer.Deserialize(dtoRecord.Payload.ToJson(), SerializerOptions); + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund failed to deserialize DTO for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapFailure("deserialize_failed"); + failedCount++; + continue; + } + + if (dto is null) + { + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapFailure("null_dto"); + failedCount++; + continue; + } + + try + { + var advisory = CertBundMapper.Map(dto, document, dtoRecord.ValidatedAt); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapSuccess(advisory.AffectedPackages.Length, advisory.Aliases.Length); + mappedCount++; + } + catch (Exception ex) + { + _logger.LogError(ex, "CERT-Bund mapping failed for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapFailure("exception"); + failedCount++; + } + } + + if (cursor.PendingMappings.Count > 0) + { + _logger.LogInformation( + "CERT-Bund map cycle: mapped {Mapped}, failures {Failures}, remaining pending mappings {PendingMappings}", + mappedCount, + failedCount, + pendingMappings.Count); + } + + var updatedCursor = cursor.WithPendingMappings(pendingMappings); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + private static double? CalculateCoverageDays(IReadOnlyList items, DateTimeOffset fetchedAt) + { + if (items is null || items.Count == 0) + { + return null; + } + + var oldest = items.Min(static item => item.Published); + if (oldest == DateTimeOffset.MinValue) + { + return null; + } + + var span = fetchedAt - oldest; + return span >= TimeSpan.Zero ? span.TotalDays : null; + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? CertBundCursor.Empty : CertBundCursor.FromBson(state.Cursor); + } + + private Task UpdateCursorAsync(CertBundCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBsonDocument(); + var completedAt = cursor.LastFetchAt ?? 
_timeProvider.GetUtcNow(); + return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken); + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/CertBundConnectorPlugin.cs b/src/StellaOps.Feedser.Source.CertBund/CertBundConnectorPlugin.cs new file mode 100644 index 00000000..00b3be7f --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/CertBundConnectorPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.CertBund; + +public sealed class CertBundConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "cert-bund"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) + => services.GetService() is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return services.GetRequiredService(); + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/CertBundDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.CertBund/CertBundDependencyInjectionRoutine.cs new file mode 100644 index 00000000..7bba7602 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/CertBundDependencyInjectionRoutine.cs @@ -0,0 +1,50 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; +using StellaOps.Feedser.Source.CertBund.Configuration; + +namespace StellaOps.Feedser.Source.CertBund; + +public sealed class CertBundDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:cert-bund"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddCertBundConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient(); + + services.PostConfigure(options => + { + EnsureJob(options, CertBundJobKinds.Fetch, typeof(CertBundFetchJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/CertBundServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.CertBund/CertBundServiceCollectionExtensions.cs new file mode 100644 index 00000000..0385bfc4 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/CertBundServiceCollectionExtensions.cs @@ -0,0 +1,48 @@ +using System; +using System.Net; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.CertBund.Configuration; +using StellaOps.Feedser.Source.CertBund.Internal; +using StellaOps.Feedser.Source.Common.Html; +using StellaOps.Feedser.Source.Common.Http; + +namespace StellaOps.Feedser.Source.CertBund; + +public static class CertBundServiceCollectionExtensions +{ + public static IServiceCollection AddCertBundConnector(this IServiceCollection services, Action configure) + { + 
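// Registration overview: binds and validates CertBundOptions, registers the named "feedser.source.certbund"
// HTTP client (cookie container plus allow-listed feed/detail/portal hosts so the SPA session bootstrap works),
// and wires the feed client, detail parser (with its HTML sanitizer), diagnostics, and the connector into DI.
// Illustrative configuration only — keys mirror CertBundOptions property names bound from the
// "feedser:sources:cert-bund" section by CertBundDependencyInjectionRoutine, e.g.:
//   feedser:sources:cert-bund:MaxAdvisoriesPerFetch = 50
//   feedser:sources:cert-bund:RequestDelay = 00:00:00.250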
ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + services.AddOptions() + .Configure(configure) + .PostConfigure(static options => options.Validate()); + + services.AddSourceHttpClient(CertBundOptions.HttpClientName, static (sp, clientOptions) => + { + var options = sp.GetRequiredService>().Value; + clientOptions.Timeout = options.RequestTimeout; + clientOptions.UserAgent = "StellaOps.Feedser.CertBund/1.0"; + clientOptions.AllowedHosts.Clear(); + clientOptions.AllowedHosts.Add(options.FeedUri.Host); + clientOptions.AllowedHosts.Add(options.DetailApiUri.Host); + clientOptions.AllowedHosts.Add(options.PortalBootstrapUri.Host); + clientOptions.ConfigureHandler = handler => + { + handler.AutomaticDecompression = DecompressionMethods.All; + handler.UseCookies = true; + handler.CookieContainer = new System.Net.CookieContainer(); + }; + }); + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.AddTransient(); + return services; + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Class1.cs b/src/StellaOps.Feedser.Source.CertBund/Class1.cs deleted file mode 100644 index 2eb03580..00000000 --- a/src/StellaOps.Feedser.Source.CertBund/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.CertBund; - -public sealed class CertBundConnectorPlugin : IConnectorPlugin -{ - public string Name => "certbund"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.CertBund/Configuration/CertBundOptions.cs b/src/StellaOps.Feedser.Source.CertBund/Configuration/CertBundOptions.cs new file mode 100644 index 00000000..5a8c6fc8 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Configuration/CertBundOptions.cs @@ -0,0 +1,104 @@ +using System.Net; + +namespace StellaOps.Feedser.Source.CertBund.Configuration; + +public sealed class CertBundOptions +{ + public const string HttpClientName = "feedser.source.certbund"; + + /// + /// RSS feed providing the latest CERT-Bund advisories. + /// + public Uri FeedUri { get; set; } = new("https://wid.cert-bund.de/content/public/securityAdvisory/rss"); + + /// + /// Portal endpoint used to bootstrap session cookies (required for the SPA JSON API). + /// + public Uri PortalBootstrapUri { get; set; } = new("https://wid.cert-bund.de/portal/"); + + /// + /// Detail API endpoint template; advisory identifier is appended as the name query parameter. + /// + public Uri DetailApiUri { get; set; } = new("https://wid.cert-bund.de/portal/api/securityadvisory"); + + /// + /// Optional timeout override for feed/detail requests. 
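/// Defaults to 30 seconds and is applied both as the named HTTP client timeout and as the per-request
/// TimeoutOverride on detail fetches.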
+ /// + public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Delay applied between successive detail fetches to respect upstream politeness. + /// + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + /// + /// Backoff recorded in source state when a fetch attempt fails. + /// + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Maximum number of advisories to enqueue per fetch iteration. + /// + public int MaxAdvisoriesPerFetch { get; set; } = 50; + + /// + /// Maximum number of advisory identifiers remembered to prevent re-processing. + /// + public int MaxKnownAdvisories { get; set; } = 512; + + public void Validate() + { + if (FeedUri is null || !FeedUri.IsAbsoluteUri) + { + throw new InvalidOperationException("CERT-Bund feed URI must be an absolute URI."); + } + + if (PortalBootstrapUri is null || !PortalBootstrapUri.IsAbsoluteUri) + { + throw new InvalidOperationException("CERT-Bund portal bootstrap URI must be an absolute URI."); + } + + if (DetailApiUri is null || !DetailApiUri.IsAbsoluteUri) + { + throw new InvalidOperationException("CERT-Bund detail API URI must be an absolute URI."); + } + + if (RequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(RequestTimeout)} must be positive."); + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(RequestDelay)} cannot be negative."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(FailureBackoff)} must be positive."); + } + + if (MaxAdvisoriesPerFetch <= 0) + { + throw new InvalidOperationException($"{nameof(MaxAdvisoriesPerFetch)} must be greater than zero."); + } + + if (MaxKnownAdvisories <= 0) + { + throw new InvalidOperationException($"{nameof(MaxKnownAdvisories)} must be greater than zero."); + } + } + + public Uri BuildDetailUri(string advisoryId) + { + if (string.IsNullOrWhiteSpace(advisoryId)) + { + throw new ArgumentException("Advisory identifier must be provided.", nameof(advisoryId)); + } + + var builder = new UriBuilder(DetailApiUri); + var queryPrefix = string.IsNullOrEmpty(builder.Query) ? string.Empty : builder.Query.TrimStart('?') + "&"; + builder.Query = $"{queryPrefix}name={Uri.EscapeDataString(advisoryId)}"; + return builder.Uri; + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundAdvisoryDto.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundAdvisoryDto.cs new file mode 100644 index 00000000..0c178bf7 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundAdvisoryDto.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +public sealed record CertBundAdvisoryDto +{ + [JsonPropertyName("advisoryId")] + public string AdvisoryId { get; init; } = string.Empty; + + [JsonPropertyName("title")] + public string Title { get; init; } = string.Empty; + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("contentHtml")] + public string ContentHtml { get; init; } = string.Empty; + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("language")] + public string Language { get; init; } = "de"; + + [JsonPropertyName("published")] + public DateTimeOffset? Published { get; init; } + + [JsonPropertyName("modified")] + public DateTimeOffset? 
Modified { get; init; } + + [JsonPropertyName("portalUri")] + public Uri PortalUri { get; init; } = new("https://wid.cert-bund.de/"); + + [JsonPropertyName("detailUri")] + public Uri DetailUri { get; init; } = new("https://wid.cert-bund.de/"); + + [JsonPropertyName("cveIds")] + public IReadOnlyList CveIds { get; init; } = Array.Empty(); + + [JsonPropertyName("products")] + public IReadOnlyList Products { get; init; } = Array.Empty(); + + [JsonPropertyName("references")] + public IReadOnlyList References { get; init; } = Array.Empty(); +} + +public sealed record CertBundProductDto +{ + [JsonPropertyName("vendor")] + public string? Vendor { get; init; } + + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("versions")] + public string? Versions { get; init; } +} + +public sealed record CertBundReferenceDto +{ + [JsonPropertyName("url")] + public string Url { get; init; } = string.Empty; + + [JsonPropertyName("label")] + public string? Label { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundCursor.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundCursor.cs new file mode 100644 index 00000000..32326bb4 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundCursor.cs @@ -0,0 +1,118 @@ +using System; +using System.Linq; +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +internal sealed record CertBundCursor( + IReadOnlyCollection PendingDocuments, + IReadOnlyCollection PendingMappings, + IReadOnlyCollection KnownAdvisories, + DateTimeOffset? LastPublished, + DateTimeOffset? LastFetchAt) +{ + private static readonly IReadOnlyCollection EmptyGuids = Array.Empty(); + private static readonly IReadOnlyCollection EmptyStrings = Array.Empty(); + + public static CertBundCursor Empty { get; } = new(EmptyGuids, EmptyGuids, EmptyStrings, null, null); + + public CertBundCursor WithPendingDocuments(IEnumerable documents) + => this with { PendingDocuments = Distinct(documents) }; + + public CertBundCursor WithPendingMappings(IEnumerable mappings) + => this with { PendingMappings = Distinct(mappings) }; + + public CertBundCursor WithKnownAdvisories(IEnumerable advisories) + => this with { KnownAdvisories = advisories?.Distinct(StringComparer.OrdinalIgnoreCase).ToArray() ?? EmptyStrings }; + + public CertBundCursor WithLastPublished(DateTimeOffset? published) + => this with { LastPublished = published }; + + public CertBundCursor WithLastFetch(DateTimeOffset? timestamp) + => this with { LastFetchAt = timestamp }; + + public BsonDocument ToBsonDocument() + { + var document = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())), + ["knownAdvisories"] = new BsonArray(KnownAdvisories), + }; + + if (LastPublished.HasValue) + { + document["lastPublished"] = LastPublished.Value.UtcDateTime; + } + + if (LastFetchAt.HasValue) + { + document["lastFetchAt"] = LastFetchAt.Value.UtcDateTime; + } + + return document; + } + + public static CertBundCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + var pendingDocuments = ReadGuidArray(document, "pendingDocuments"); + var pendingMappings = ReadGuidArray(document, "pendingMappings"); + var knownAdvisories = ReadStringArray(document, "knownAdvisories"); + var lastPublished = document.TryGetValue("lastPublished", out var publishedValue) + ? 
ParseDate(publishedValue) + : null; + var lastFetch = document.TryGetValue("lastFetchAt", out var fetchValue) + ? ParseDate(fetchValue) + : null; + + return new CertBundCursor(pendingDocuments, pendingMappings, knownAdvisories, lastPublished, lastFetch); + } + + private static IReadOnlyCollection Distinct(IEnumerable? values) + => values?.Distinct().ToArray() ?? EmptyGuids; + + private static IReadOnlyCollection ReadGuidArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyGuids; + } + + var items = new List(array.Count); + foreach (var element in array) + { + if (Guid.TryParse(element?.ToString(), out var id)) + { + items.Add(id); + } + } + + return items; + } + + private static IReadOnlyCollection ReadStringArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyStrings; + } + + return array.Select(element => element?.ToString() ?? string.Empty) + .Where(static s => !string.IsNullOrWhiteSpace(s)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static DateTimeOffset? ParseDate(BsonValue value) + => value.BsonType switch + { + BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc), + BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => null, + }; +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailParser.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailParser.cs new file mode 100644 index 00000000..946925f3 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailParser.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Feedser.Source.Common.Html; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +public sealed class CertBundDetailParser +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private readonly HtmlContentSanitizer _sanitizer; + + public CertBundDetailParser(HtmlContentSanitizer sanitizer) + => _sanitizer = sanitizer ?? throw new ArgumentNullException(nameof(sanitizer)); + + public CertBundAdvisoryDto Parse(Uri detailUri, Uri portalUri, byte[] payload) + { + var detail = JsonSerializer.Deserialize(payload, SerializerOptions) + ?? throw new InvalidOperationException("CERT-Bund detail payload deserialized to null."); + + var advisoryId = detail.Name ?? throw new InvalidOperationException("CERT-Bund detail missing advisory name."); + var contentHtml = _sanitizer.Sanitize(detail.Description ?? string.Empty, portalUri); + + return new CertBundAdvisoryDto + { + AdvisoryId = advisoryId, + Title = detail.Title ?? advisoryId, + Summary = detail.Summary, + ContentHtml = contentHtml, + Severity = detail.Severity, + Language = string.IsNullOrWhiteSpace(detail.Language) ? "de" : detail.Language!, + Published = detail.Published, + Modified = detail.Updated ?? detail.Published, + PortalUri = portalUri, + DetailUri = detailUri, + CveIds = detail.CveIds?.Where(static id => !string.IsNullOrWhiteSpace(id)) + .Select(static id => id!.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray() ?? 
Array.Empty(), + References = MapReferences(detail.References), + Products = MapProducts(detail.Products), + }; + } + + private static IReadOnlyList MapReferences(CertBundDetailReference[]? references) + { + if (references is null || references.Length == 0) + { + return Array.Empty(); + } + + return references + .Where(static reference => !string.IsNullOrWhiteSpace(reference.Url)) + .Select(reference => new CertBundReferenceDto + { + Url = reference.Url!, + Label = reference.Label, + }) + .DistinctBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList MapProducts(CertBundDetailProduct[]? products) + { + if (products is null || products.Length == 0) + { + return Array.Empty(); + } + + return products + .Where(static product => !string.IsNullOrWhiteSpace(product.Vendor) || !string.IsNullOrWhiteSpace(product.Name)) + .Select(product => new CertBundProductDto + { + Vendor = product.Vendor, + Name = product.Name, + Versions = product.Versions, + }) + .ToArray(); + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailResponse.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailResponse.cs new file mode 100644 index 00000000..6b48697f --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDetailResponse.cs @@ -0,0 +1,60 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +internal sealed record CertBundDetailResponse +{ + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("title")] + public string? Title { get; init; } + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("language")] + public string? Language { get; init; } + + [JsonPropertyName("published")] + public DateTimeOffset? Published { get; init; } + + [JsonPropertyName("updated")] + public DateTimeOffset? Updated { get; init; } + + [JsonPropertyName("cveIds")] + public string[]? CveIds { get; init; } + + [JsonPropertyName("references")] + public CertBundDetailReference[]? References { get; init; } + + [JsonPropertyName("products")] + public CertBundDetailProduct[]? Products { get; init; } +} + +internal sealed record CertBundDetailReference +{ + [JsonPropertyName("url")] + public string? Url { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } +} + +internal sealed record CertBundDetailProduct +{ + [JsonPropertyName("vendor")] + public string? Vendor { get; init; } + + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("versions")] + public string? Versions { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDiagnostics.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDiagnostics.cs new file mode 100644 index 00000000..dfc98bb3 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDiagnostics.cs @@ -0,0 +1,191 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +/// +/// Emits OpenTelemetry counters and histograms for the CERT-Bund connector. 
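/// Instruments are published on the "StellaOps.Feedser.Source.CertBund" meter; a minimal, illustrative
/// OpenTelemetry hookup (assuming the OpenTelemetry.Extensions.Hosting package is referenced) would be:
/// <code>
/// services.AddOpenTelemetry()
///     .WithMetrics(metrics => metrics.AddMeter("StellaOps.Feedser.Source.CertBund"));
/// </code>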
+/// +public sealed class CertBundDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.CertBund"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + private readonly Counter _feedFetchAttempts; + private readonly Counter _feedFetchSuccess; + private readonly Counter _feedFetchFailures; + private readonly Histogram _feedItemCount; + private readonly Histogram _feedEnqueuedCount; + private readonly Histogram _feedCoverageDays; + private readonly Counter _detailFetchAttempts; + private readonly Counter _detailFetchSuccess; + private readonly Counter _detailFetchNotModified; + private readonly Counter _detailFetchFailures; + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Histogram _parseProductCount; + private readonly Histogram _parseCveCount; + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + private readonly Histogram _mapPackageCount; + private readonly Histogram _mapAliasCount; + + public CertBundDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + _feedFetchAttempts = _meter.CreateCounter( + name: "certbund.feed.fetch.attempts", + unit: "operations", + description: "Number of RSS feed load attempts."); + _feedFetchSuccess = _meter.CreateCounter( + name: "certbund.feed.fetch.success", + unit: "operations", + description: "Number of successful RSS feed loads."); + _feedFetchFailures = _meter.CreateCounter( + name: "certbund.feed.fetch.failures", + unit: "operations", + description: "Number of RSS feed load failures."); + _feedItemCount = _meter.CreateHistogram( + name: "certbund.feed.items.count", + unit: "items", + description: "Distribution of RSS item counts per fetch."); + _feedEnqueuedCount = _meter.CreateHistogram( + name: "certbund.feed.enqueued.count", + unit: "documents", + description: "Distribution of advisory documents enqueued per fetch."); + _feedCoverageDays = _meter.CreateHistogram( + name: "certbund.feed.coverage.days", + unit: "days", + description: "Coverage window in days between fetch time and the oldest published advisory in the feed."); + _detailFetchAttempts = _meter.CreateCounter( + name: "certbund.detail.fetch.attempts", + unit: "operations", + description: "Number of detail fetch attempts."); + _detailFetchSuccess = _meter.CreateCounter( + name: "certbund.detail.fetch.success", + unit: "operations", + description: "Number of detail fetches that persisted a document."); + _detailFetchNotModified = _meter.CreateCounter( + name: "certbund.detail.fetch.not_modified", + unit: "operations", + description: "Number of detail fetches returning HTTP 304."); + _detailFetchFailures = _meter.CreateCounter( + name: "certbund.detail.fetch.failures", + unit: "operations", + description: "Number of detail fetches that failed."); + _parseSuccess = _meter.CreateCounter( + name: "certbund.parse.success", + unit: "documents", + description: "Number of documents parsed into CERT-Bund DTOs."); + _parseFailures = _meter.CreateCounter( + name: "certbund.parse.failures", + unit: "documents", + description: "Number of documents that failed to parse."); + _parseProductCount = _meter.CreateHistogram( + name: "certbund.parse.products.count", + unit: "products", + description: "Distribution of product entries captured per advisory."); + _parseCveCount = _meter.CreateHistogram( + name: "certbund.parse.cve.count", + unit: "aliases", + description: "Distribution of CVE identifiers captured per advisory."); + _mapSuccess = 
_meter.CreateCounter( + name: "certbund.map.success", + unit: "advisories", + description: "Number of canonical advisories emitted by the mapper."); + _mapFailures = _meter.CreateCounter( + name: "certbund.map.failures", + unit: "advisories", + description: "Number of mapping failures."); + _mapPackageCount = _meter.CreateHistogram( + name: "certbund.map.affected.count", + unit: "packages", + description: "Distribution of affected packages emitted per advisory."); + _mapAliasCount = _meter.CreateHistogram( + name: "certbund.map.aliases.count", + unit: "aliases", + description: "Distribution of alias counts per advisory."); + } + + public void FeedFetchAttempt() => _feedFetchAttempts.Add(1); + + public void FeedFetchSuccess(int itemCount) + { + _feedFetchSuccess.Add(1); + if (itemCount >= 0) + { + _feedItemCount.Record(itemCount); + } + } + + public void FeedFetchFailure(string reason = "error") + => _feedFetchFailures.Add(1, ReasonTag(reason)); + + public void RecordFeedCoverage(double? coverageDays) + { + if (coverageDays is { } days && days >= 0) + { + _feedCoverageDays.Record(days); + } + } + + public void DetailFetchAttempt() => _detailFetchAttempts.Add(1); + + public void DetailFetchSuccess() => _detailFetchSuccess.Add(1); + + public void DetailFetchNotModified() => _detailFetchNotModified.Add(1); + + public void DetailFetchFailure(string reason = "error") + => _detailFetchFailures.Add(1, ReasonTag(reason)); + + public void DetailFetchEnqueued(int count) + { + if (count >= 0) + { + _feedEnqueuedCount.Record(count); + } + } + + public void ParseSuccess(int productCount, int cveCount) + { + _parseSuccess.Add(1); + + if (productCount >= 0) + { + _parseProductCount.Record(productCount); + } + + if (cveCount >= 0) + { + _parseCveCount.Record(cveCount); + } + } + + public void ParseFailure(string reason = "error") + => _parseFailures.Add(1, ReasonTag(reason)); + + public void MapSuccess(int affectedPackages, int aliasCount) + { + _mapSuccess.Add(1); + + if (affectedPackages >= 0) + { + _mapPackageCount.Record(affectedPackages); + } + + if (aliasCount >= 0) + { + _mapAliasCount.Record(aliasCount); + } + } + + public void MapFailure(string reason = "error") + => _mapFailures.Add(1, ReasonTag(reason)); + + private static KeyValuePair ReasonTag(string reason) + => new("reason", string.IsNullOrWhiteSpace(reason) ? 
"unknown" : reason.ToLowerInvariant()); + + public void Dispose() => _meter.Dispose(); +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDocumentMetadata.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDocumentMetadata.cs new file mode 100644 index 00000000..818e1237 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundDocumentMetadata.cs @@ -0,0 +1,29 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +internal static class CertBundDocumentMetadata +{ + public static Dictionary CreateMetadata(CertBundFeedItem item) + { + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["certbund.advisoryId"] = item.AdvisoryId, + ["certbund.portalUri"] = item.PortalUri.ToString(), + ["certbund.published"] = item.Published.ToString("O"), + }; + + if (!string.IsNullOrWhiteSpace(item.Category)) + { + metadata["certbund.category"] = item.Category!; + } + + if (!string.IsNullOrWhiteSpace(item.Title)) + { + metadata["certbund.title"] = item.Title!; + } + + return metadata; + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedClient.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedClient.cs new file mode 100644 index 00000000..cbd1dbd5 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedClient.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using System.Xml.Linq; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.CertBund.Configuration; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +public sealed class CertBundFeedClient +{ + private readonly IHttpClientFactory _httpClientFactory; + private readonly CertBundOptions _options; + private readonly ILogger _logger; + private readonly SemaphoreSlim _bootstrapSemaphore = new(1, 1); + private volatile bool _bootstrapped; + + public CertBundFeedClient( + IHttpClientFactory httpClientFactory, + IOptions options, + ILogger logger) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task> LoadAsync(CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient(CertBundOptions.HttpClientName); + await EnsureSessionAsync(client, cancellationToken).ConfigureAwait(false); + + using var request = new HttpRequestMessage(HttpMethod.Get, _options.FeedUri); + request.Headers.TryAddWithoutValidation("Accept", "application/rss+xml, application/xml;q=0.9, text/xml;q=0.8"); + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var document = XDocument.Load(stream); + + var items = new List(); + foreach (var element in document.Descendants("item")) + { + cancellationToken.ThrowIfCancellationRequested(); + + var linkValue = element.Element("link")?.Value?.Trim(); + if (string.IsNullOrWhiteSpace(linkValue) || !Uri.TryCreate(linkValue, UriKind.Absolute, out var portalUri)) + { + continue; + } + + var advisoryId = TryExtractNameParameter(portalUri); + if (string.IsNullOrWhiteSpace(advisoryId)) + { + continue; + } + + var detailUri = _options.BuildDetailUri(advisoryId); + var pubDateText = element.Element("pubDate")?.Value; + var published = ParseDate(pubDateText); + var title = element.Element("title")?.Value?.Trim(); + var category = element.Element("category")?.Value?.Trim(); + + items.Add(new CertBundFeedItem(advisoryId, detailUri, portalUri, published, title, category)); + } + + return items; + } + + private async Task EnsureSessionAsync(HttpClient client, CancellationToken cancellationToken) + { + if (_bootstrapped) + { + return; + } + + await _bootstrapSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_bootstrapped) + { + return; + } + + using var request = new HttpRequestMessage(HttpMethod.Get, _options.PortalBootstrapUri); + request.Headers.TryAddWithoutValidation("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"); + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + _bootstrapped = true; + } + finally + { + _bootstrapSemaphore.Release(); + } + } + + private static string? TryExtractNameParameter(Uri portalUri) + { + if (portalUri is null) + { + return null; + } + + var query = portalUri.Query; + if (string.IsNullOrEmpty(query)) + { + return null; + } + + var trimmed = query.TrimStart('?'); + foreach (var pair in trimmed.Split('&', StringSplitOptions.RemoveEmptyEntries)) + { + var separatorIndex = pair.IndexOf('='); + if (separatorIndex <= 0) + { + continue; + } + + var key = pair[..separatorIndex].Trim(); + if (!key.Equals("name", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var value = pair[(separatorIndex + 1)..]; + return Uri.UnescapeDataString(value); + } + + return null; + } + + private static DateTimeOffset ParseDate(string? value) + => DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed) + ? 
parsed + : DateTimeOffset.UtcNow; +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedItem.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedItem.cs new file mode 100644 index 00000000..a92374c9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundFeedItem.cs @@ -0,0 +1,11 @@ +namespace StellaOps.Feedser.Source.CertBund.Internal; + +using System; + +public sealed record CertBundFeedItem( + string AdvisoryId, + Uri DetailUri, + Uri PortalUri, + DateTimeOffset Published, + string? Title, + string? Category); diff --git a/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundMapper.cs b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundMapper.cs new file mode 100644 index 00000000..ff168ce2 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Internal/CertBundMapper.cs @@ -0,0 +1,168 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Storage.Mongo.Documents; + +namespace StellaOps.Feedser.Source.CertBund.Internal; + +internal static class CertBundMapper +{ + public static Advisory Map(CertBundAdvisoryDto dto, DocumentRecord document, DateTimeOffset recordedAt) + { + ArgumentNullException.ThrowIfNull(dto); + ArgumentNullException.ThrowIfNull(document); + + var aliases = BuildAliases(dto); + var references = BuildReferences(dto, recordedAt); + var packages = BuildPackages(dto, recordedAt); + var provenance = new AdvisoryProvenance( + CertBundConnectorPlugin.SourceName, + "advisory", + dto.AdvisoryId, + recordedAt, + new[] { ProvenanceFieldMasks.Advisory }); + + return new Advisory( + advisoryKey: dto.AdvisoryId, + title: dto.Title, + summary: dto.Summary, + language: dto.Language?.ToLowerInvariant() ?? "de", + published: dto.Published, + modified: dto.Modified, + severity: MapSeverity(dto.Severity), + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: packages, + cvssMetrics: Array.Empty(), + provenance: new[] { provenance }); + } + + private static IReadOnlyList BuildAliases(CertBundAdvisoryDto dto) + { + var aliases = new List(capacity: 4) { dto.AdvisoryId }; + foreach (var cve in dto.CveIds) + { + if (!string.IsNullOrWhiteSpace(cve)) + { + aliases.Add(cve); + } + } + + return aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildReferences(CertBundAdvisoryDto dto, DateTimeOffset recordedAt) + { + var references = new List + { + new(dto.DetailUri.ToString(), "details", "cert-bund", null, new AdvisoryProvenance( + CertBundConnectorPlugin.SourceName, + "reference", + dto.DetailUri.ToString(), + recordedAt, + new[] { ProvenanceFieldMasks.References })) + }; + + foreach (var reference in dto.References) + { + if (string.IsNullOrWhiteSpace(reference.Url)) + { + continue; + } + + references.Add(new AdvisoryReference( + reference.Url, + kind: "reference", + sourceTag: "cert-bund", + summary: reference.Label, + provenance: new AdvisoryProvenance( + CertBundConnectorPlugin.SourceName, + "reference", + reference.Url, + recordedAt, + new[] { ProvenanceFieldMasks.References }))); + } + + return references + .DistinctBy(static reference => reference.Url, StringComparer.Ordinal) + .OrderBy(static reference => reference.Url, StringComparer.Ordinal) + .ToArray(); + } + + private static IReadOnlyList BuildPackages(CertBundAdvisoryDto dto, 
DateTimeOffset recordedAt) + { + if (dto.Products.Count == 0) + { + return Array.Empty(); + } + + var packages = new List(dto.Products.Count); + foreach (var product in dto.Products) + { + var vendor = Validation.TrimToNull(product.Vendor) ?? "Unspecified"; + var name = Validation.TrimToNull(product.Name); + var identifier = name is null ? vendor : $"{vendor} {name}"; + + var provenance = new AdvisoryProvenance( + CertBundConnectorPlugin.SourceName, + "package", + identifier, + recordedAt, + new[] { ProvenanceFieldMasks.AffectedPackages }); + + var ranges = string.IsNullOrWhiteSpace(product.Versions) + ? Array.Empty() + : new[] + { + new AffectedVersionRange( + rangeKind: "string", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: product.Versions, + provenance: new AdvisoryProvenance( + CertBundConnectorPlugin.SourceName, + "package-range", + product.Versions, + recordedAt, + new[] { ProvenanceFieldMasks.VersionRanges })) + }; + + packages.Add(new AffectedPackage( + AffectedPackageTypes.Vendor, + identifier, + platform: null, + versionRanges: ranges, + statuses: Array.Empty(), + provenance: new[] { provenance }, + normalizedVersions: Array.Empty())); + } + + return packages + .DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static string? MapSeverity(string? severity) + { + if (string.IsNullOrWhiteSpace(severity)) + { + return null; + } + + return severity.ToLowerInvariant() switch + { + "hoch" or "high" => "high", + "mittel" or "medium" => "medium", + "gering" or "low" => "low", + _ => severity.ToLowerInvariant(), + }; + } +} diff --git a/src/StellaOps.Feedser.Source.CertBund/Jobs.cs b/src/StellaOps.Feedser.Source.CertBund/Jobs.cs new file mode 100644 index 00000000..cedb948c --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/Jobs.cs @@ -0,0 +1,22 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.CertBund; + +internal static class CertBundJobKinds +{ + public const string Fetch = "source:cert-bund:fetch"; +} + +internal sealed class CertBundFetchJob : IJob +{ + private readonly CertBundConnector _connector; + + public CertBundFetchJob(CertBundConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.CertBund/README.md b/src/StellaOps.Feedser.Source.CertBund/README.md new file mode 100644 index 00000000..faefeda6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.CertBund/README.md @@ -0,0 +1,39 @@ +# CERT-Bund Security Advisories – Connector Notes + +## Publication endpoints +- **RSS feed (latest 250 advisories)** – `https://wid.cert-bund.de/content/public/securityAdvisory/rss`. The feed refreshes quickly; the current window spans roughly 6 days of activity, so fetch jobs must run frequently to avoid churn. +- **Portal bootstrap** – `https://wid.cert-bund.de/portal/` is hit once per process start to prime the session (`client_config` cookie) before any API calls. +- **Detail API** – `https://wid.cert-bund.de/portal/api/securityadvisory?name=`. The connector reuses the bootstrapped `SocketsHttpHandler` so cookies and headers match the Angular SPA. 
Manual reproduction requires the same cookie container; otherwise the endpoint responds with the shell HTML document. + +## Telemetry +The OpenTelemetry meter is `StellaOps.Feedser.Source.CertBund`. Key instruments: + +| Metric | Type | Notes | +| --- | --- | --- | +| `certbund.feed.fetch.attempts` / `.success` / `.failures` | counter | Feed poll lifecycle. | +| `certbund.feed.items.count` | histogram | Items returned per RSS fetch. | +| `certbund.feed.enqueued.count` | histogram | Detail documents queued per cycle (post-dedupe, before truncation). | +| `certbund.feed.coverage.days` | histogram | Rolling window (fetch time − oldest published entry). Useful to alert when feed depth contracts. | +| `certbund.detail.fetch.*` | counter | Attempts, successes, HTTP 304, and failure counts; failures are tagged by reason (`skipped`, `exception`). | +| `certbund.parse.success` / `.failures` | counter | Parsing outcomes; histograms capture product and CVE counts. | +| `certbund.map.success` / `.failures` | counter | Canonical mapping results; histograms capture affected-package and alias volume. | + +Dashboards should chart coverage days and enqueued counts alongside fetch failures: sharp drops indicate the upstream window tightened or parsing stalled. + +## Logging signals +- `CERT-Bund fetch cycle: feed items …` summarises each RSS run (enqueued, already-known, HTTP 304, failures, coverage window). +- Parse and map stages log corresponding counts when work remains in the cursor. +- Errors include advisory/document identifiers to simplify replays. + +## Historical coverage +- RSS contains the newest **250** items (≈6 days at the current publication rate). The connector prunes the “known advisory” set to 512 IDs to avoid unbounded memory but retains enough headroom for short-term replay. +- Older advisories remain accessible through the same detail API (`WID-SEC--` identifiers). For deep backfills run a scripted sweep that queues historical IDs in descending order; the connector will persist any payloads that still resolve. Document these batches under source state comments so Merge/Docs can track provenance. + +## Locale & translation stance +- CERT-Bund publishes advisory titles and summaries **only in German** (language tag `de`). The connector preserves original casing/content and sets `Advisory.Language = "de"`. +- Operator guidance: + 1. Front-line analysts consuming Feedser data should maintain German literacy or rely on approved machine-translation pipelines. + 2. When mirroring advisories into English dashboards, store translations outside the canonical advisory payload to keep determinism. Suggested approach: create an auxiliary collection keyed by advisory ID with timestamped translated snippets. + 3. Offline Kit bundles must document that CERT-Bund content is untranslated to avoid surprise during audits. + +The Docs guild will surface the translation policy (retain German source, optionally layer operator-provided translations) in the broader i18n section; this README is the connector-level reference. 
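For point 2 of the locale guidance, a minimal sketch of the auxiliary translation record is shown below. Everything in it is illustrative: the `CertBundAdvisoryTranslation` record, the `ICertBundTranslationStore` contract, and the `certbund_translations` collection name are hypothetical and not part of the connector; the only assumption carried over is that advisories are keyed by their `WID-SEC` identifier and that the canonical German payload stays untouched.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical auxiliary record for operator-provided translations.
// Stored outside the canonical advisory payload so deterministic
// fetch/parse/map snapshots are unaffected by translation churn.
public sealed record CertBundAdvisoryTranslation(
    string AdvisoryId,          // the WID-SEC identifier used as the advisory key
    string TargetLanguage,      // e.g. "en"
    string TranslatedTitle,
    string? TranslatedSummary,
    string Translator,          // human reviewer or MT pipeline identifier
    DateTimeOffset TranslatedAt);

// Hypothetical store contract; a Mongo-backed implementation could keep these
// documents in a separate collection (e.g. "certbund_translations") keyed by
// (AdvisoryId, TargetLanguage).
public interface ICertBundTranslationStore
{
    Task UpsertAsync(CertBundAdvisoryTranslation translation, CancellationToken cancellationToken);
    Task<CertBundAdvisoryTranslation?> FindAsync(string advisoryId, string targetLanguage, CancellationToken cancellationToken);
}
```

Because the translation lives in its own document, dashboards can join it to the German canonical record at render time without touching provenance or snapshot determinism.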
diff --git a/src/StellaOps.Feedser.Source.CertBund/StellaOps.Feedser.Source.CertBund.csproj b/src/StellaOps.Feedser.Source.CertBund/StellaOps.Feedser.Source.CertBund.csproj index f7f2c154..f857e20d 100644 --- a/src/StellaOps.Feedser.Source.CertBund/StellaOps.Feedser.Source.CertBund.csproj +++ b/src/StellaOps.Feedser.Source.CertBund/StellaOps.Feedser.Source.CertBund.csproj @@ -6,11 +6,10 @@ enable - - - - - - - - + + + + + + + diff --git a/src/StellaOps.Feedser.Source.CertBund/TASKS.md b/src/StellaOps.Feedser.Source.CertBund/TASKS.md index 05db2b1a..0fb691ed 100644 --- a/src/StellaOps.Feedser.Source.CertBund/TASKS.md +++ b/src/StellaOps.Feedser.Source.CertBund/TASKS.md @@ -2,10 +2,11 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-CERTBUND-02-001 Research CERT-Bund advisory endpoints|BE-Conn-CERTBUND|Research|**DONE (2025-10-11)** – Confirmed public RSS at `https://wid.cert-bund.de/content/public/securityAdvisory/rss` (HTTP 200 w/out cookies), 250-item window, German titles/categories, and detail links pointing to Angular SPA. Captured header profile (no cache hints) and logged open item to discover the JSON API used by `portal` frontend.| -|FEEDCONN-CERTBUND-02-002 Fetch job & state persistence|BE-Conn-CERTBUND|Source.Common, Storage.Mongo|**TODO** – Implement fetcher with gzip support + `Accept: application/rss+xml`, store raw XML and derived SHA, and treat feed as append-only (no pagination). Cursor should rely on `pubDate` + advisory `link` to avoid duplicates; throttle to ≤1 fetch/5 min to match WID politeness.| -|FEEDCONN-CERTBUND-02-003 Parser/DTO implementation|BE-Conn-CERTBUND|Source.Common|**TODO** – RSS only carries synopsis; need secondary request to SPA JSON (`portal` app). Action: trace network for calls triggered when loading `WID-SEC-2025-2254` (likely `/portal/api/securityadvisory?name=`). Once endpoint known, map severity (``), summary HTML, affected products, and references.| -|FEEDCONN-CERTBUND-02-004 Canonical mapping & range primitives|BE-Conn-CERTBUND|Models|**TODO** – Map advisories into canonical records including aliases, references, affected packages, and vendor range primitives. Follow normalized range guidance in `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: target JSON `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"certbund:advisory-id"}]`; adjust scheme for firmware-style ranges when necessary.| -|FEEDCONN-CERTBUND-02-005 Regression fixtures & tests|QA|Testing|**TODO** – Add deterministic fetch/parse/map tests with fixtures; support `UPDATE_CERTBUND_FIXTURES=1`.| -|FEEDCONN-CERTBUND-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics, document connector configuration, and update backlog when feature complete.| -|FEEDCONN-CERTBUND-02-007 Feed history & locale assessment|BE-Conn-CERTBUND|Research|**TODO** – Latest snapshot shows ~250 entries (~90 days). Need to confirm older advisories via HTML pagination (`/portal/wid/securityadvisory?page=`?) or JSON export, and document translation plan (titles/descriptions remain German).| -|FEEDCONN-CERTBUND-02-008 Session bootstrap & cookie strategy|BE-Conn-CERTBUND|Source.Common|**TODO** – SPA detail API likely demands session cookies (`AL_SESS-S`). Reproduce issuance flow (load `/portal/` once, grab cookies), decide on cached cookie store vs automated refresh, and ensure Offline Kit includes instructions for rehydrating trust stores if TLS chain changes.| +|FEEDCONN-CERTBUND-02-002 Fetch job & state persistence|BE-Conn-CERTBUND|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – `CertBundConnector.FetchAsync` consumes RSS via session-bootstrapped client, stores per-advisory JSON documents with metadata + SHA, throttles detail requests, and maintains cursor state (pending docs/mappings, known advisory IDs, last published).| +|FEEDCONN-CERTBUND-02-003 Parser/DTO implementation|BE-Conn-CERTBUND|Source.Common|**DONE (2025-10-14)** – Detail JSON piped through `CertBundDetailParser` (raw DOM sanitised to HTML), capturing severity, CVEs, product list, and references into DTO records (`cert-bund.detail.v1`).| +|FEEDCONN-CERTBUND-02-004 Canonical mapping & range primitives|BE-Conn-CERTBUND|Models|**DONE (2025-10-14)** – `CertBundMapper` emits canonical advisories (aliases, references, vendor package ranges, provenance) with severity normalisation and deterministic ordering.| +|FEEDCONN-CERTBUND-02-005 Regression fixtures & tests|QA|Testing|**DONE (2025-10-14)** – Added `StellaOps.Feedser.Source.CertBund.Tests` covering fetch→parse→map against canned RSS/JSON fixtures; integration harness uses Mongo2Go + canned HTTP handler; fixtures regenerate via `UPDATE_CERTBUND_FIXTURES=1`.| +|FEEDCONN-CERTBUND-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-15)** – Added `CertBundDiagnostics` (meter `StellaOps.Feedser.Source.CertBund`) with fetch/parse/map counters + histograms, recorded coverage days, wired stage summary logs, and published the ops runbook (`docs/ops/feedser-certbund-operations.md`).| +|FEEDCONN-CERTBUND-02-007 Feed history & locale assessment|BE-Conn-CERTBUND|Research|**DONE (2025-10-15)** – Measured RSS retention (~6 days/≈250 items), captured connector-driven backfill guidance in the runbook, and aligned locale guidance (preserve `language=de`, Docs glossary follow-up). 
**Next:** coordinate with Tools to land the state-seeding helper so scripted backfills replace manual Mongo tweaks.| +|FEEDCONN-CERTBUND-02-008 Session bootstrap & cookie strategy|BE-Conn-CERTBUND|Source.Common|**DONE (2025-10-14)** – Feed client primes the portal session (cookie container via `SocketsHttpHandler`), shares cookies across detail requests, and documents bootstrap behaviour in options (`PortalBootstrapUri`).| +|FEEDCONN-CERTBUND-02-009 Offline Kit export packaging|BE-Conn-CERTBUND, Docs|Offline Kit|**TODO** – Capture JSON search/export snapshots (per-year splits), generate manifest fields (`source`,`from`,`to`,`sha256`,`capturedAt`), and update Offline Kit docs so air-gapped deployments can seed historical CERT-Bund advisories without live fetching. **Remark:** follow the interim workflow documented in `docs/ops/feedser-certbund-operations.md` §3.3 until the packaged artefacts ship.| diff --git a/src/StellaOps.Feedser.Source.Common.Tests/Common/HtmlContentSanitizerTests.cs b/src/StellaOps.Feedser.Source.Common.Tests/Common/HtmlContentSanitizerTests.cs index 4415c2c7..2d1eee3b 100644 --- a/src/StellaOps.Feedser.Source.Common.Tests/Common/HtmlContentSanitizerTests.cs +++ b/src/StellaOps.Feedser.Source.Common.Tests/Common/HtmlContentSanitizerTests.cs @@ -19,13 +19,25 @@ public sealed class HtmlContentSanitizerTests } [Fact] - public void Sanitize_PreservesBasicFormatting() - { - var sanitizer = new HtmlContentSanitizer(); - var input = "

    Hello world

    "; - - var sanitized = sanitizer.Sanitize(input); - - Assert.Equal("

    Hello world

    ", sanitized); - } -} + public void Sanitize_PreservesBasicFormatting() + { + var sanitizer = new HtmlContentSanitizer(); + var input = "

    Hello world

    "; + + var sanitized = sanitizer.Sanitize(input); + + Assert.Equal("

    Hello world

    ", sanitized); + } + + [Fact] + public void Sanitize_PreservesHeadingsAndLists() + { + var sanitizer = new HtmlContentSanitizer(); + var input = "

    Affected Products

    • Example One
    "; + + var sanitized = sanitizer.Sanitize(input); + + Assert.Contains("

    Affected Products

    ", sanitized, StringComparison.Ordinal); + Assert.Contains("
    • Example One
    ", sanitized, StringComparison.Ordinal); + } +} diff --git a/src/StellaOps.Feedser.Source.Common.Tests/Common/SourceHttpClientBuilderTests.cs b/src/StellaOps.Feedser.Source.Common.Tests/Common/SourceHttpClientBuilderTests.cs index 70172b39..188c1db7 100644 --- a/src/StellaOps.Feedser.Source.Common.Tests/Common/SourceHttpClientBuilderTests.cs +++ b/src/StellaOps.Feedser.Source.Common.Tests/Common/SourceHttpClientBuilderTests.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; using System.IO; using System.Net; @@ -52,6 +53,56 @@ public sealed class SourceHttpClientBuilderTests Assert.NotNull(capturedHandler); } + [Fact] + public void AddSourceHttpClient_LoadsProxyConfiguration() + { + var services = new ServiceCollection(); + services.AddLogging(); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyAddressKey}"] = "http://proxy.local:8080", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyBypassOnLocalKey}"] = "false", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:0"] = "localhost", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:1"] = "127.0.0.1", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyUseDefaultCredentialsKey}"] = "false", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyUsernameKey}"] = "svc-feedser", + [$"feedser:httpClients:source.icscisa:{ProxySection}:{ProxyPasswordKey}"] = "s3cr3t!", + }) + .Build(); + + services.AddSingleton(configuration); + + services.AddSourceHttpClient("source.icscisa", (_, options) => + { + options.AllowedHosts.Add("content.govdelivery.com"); + options.ProxyAddress = new Uri("http://configure.local:9000"); + }); + + using var provider = services.BuildServiceProvider(); + _ = provider.GetRequiredService().CreateClient("source.icscisa"); + + var resolvedConfiguration = provider.GetRequiredService(); + var proxySection = resolvedConfiguration + .GetSection("feedser") + .GetSection("httpClients") + .GetSection("source.icscisa") + .GetSection("proxy"); + Assert.True(proxySection.Exists()); + Assert.Equal("http://proxy.local:8080", proxySection[ProxyAddressKey]); + + var configuredOptions = provider.GetRequiredService>().Get("source.icscisa"); + Assert.NotNull(configuredOptions.ProxyAddress); + Assert.Equal(new Uri("http://proxy.local:8080"), configuredOptions.ProxyAddress); + Assert.False(configuredOptions.ProxyBypassOnLocal); + Assert.Contains("localhost", configuredOptions.ProxyBypassList, StringComparer.OrdinalIgnoreCase); + Assert.Contains("127.0.0.1", configuredOptions.ProxyBypassList); + Assert.False(configuredOptions.ProxyUseDefaultCredentials); + Assert.Equal("svc-feedser", configuredOptions.ProxyUsername); + Assert.Equal("s3cr3t!", configuredOptions.ProxyPassword); + } + [Fact] public void AddSourceHttpClient_UsesConfigurationToBypassValidation() { @@ -234,4 +285,11 @@ public sealed class SourceHttpClientBuilderTests private const string AllowInvalidKey = "allowInvalidCertificates"; private const string TrustedRootPathsKey = "trustedRootPaths"; private const string OfflineRootKey = "offlineRoot"; + private const string ProxySection = "proxy"; + private const string ProxyAddressKey = "address"; + private const string ProxyBypassOnLocalKey = "bypassOnLocal"; + private const string ProxyBypassListKey = "bypassList"; + private const string ProxyUseDefaultCredentialsKey = "useDefaultCredentials"; + private const string ProxyUsernameKey 
= "username"; + private const string ProxyPasswordKey = "password"; } diff --git a/src/StellaOps.Feedser.Source.Common/Html/HtmlContentSanitizer.cs b/src/StellaOps.Feedser.Source.Common/Html/HtmlContentSanitizer.cs index 61a6ecad..15d5a8e2 100644 --- a/src/StellaOps.Feedser.Source.Common/Html/HtmlContentSanitizer.cs +++ b/src/StellaOps.Feedser.Source.Common/Html/HtmlContentSanitizer.cs @@ -12,11 +12,11 @@ namespace StellaOps.Feedser.Source.Common.Html; public sealed class HtmlContentSanitizer { private static readonly HashSet AllowedElements = new(StringComparer.OrdinalIgnoreCase) - { - "a", "abbr", "b", "body", "blockquote", "br", "code", "dd", "div", "dl", "dt", - "em", "html", "i", "li", "ol", "p", "pre", "s", "small", "span", - "strong", "sub", "sup", "table", "tbody", "td", "th", "thead", "tr", "ul" - }; + { + "a", "abbr", "article", "b", "body", "blockquote", "br", "code", "dd", "div", "dl", "dt", + "em", "h1", "h2", "h3", "h4", "h5", "h6", "html", "i", "li", "ol", "p", "pre", "s", + "section", "small", "span", "strong", "sub", "sup", "table", "tbody", "td", "th", "thead", "tr", "ul" + }; private static readonly HashSet UrlAttributes = new(StringComparer.OrdinalIgnoreCase) { diff --git a/src/StellaOps.Feedser.Source.Common/Http/ServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Common/Http/ServiceCollectionExtensions.cs index 9b168a77..eea1eb7f 100644 --- a/src/StellaOps.Feedser.Source.Common/Http/ServiceCollectionExtensions.cs +++ b/src/StellaOps.Feedser.Source.Common/Http/ServiceCollectionExtensions.cs @@ -5,31 +5,31 @@ using System.Security.Cryptography.X509Certificates; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Options; using StellaOps.Feedser.Source.Common.Xml; - -namespace StellaOps.Feedser.Source.Common.Http; - -public static class ServiceCollectionExtensions -{ - /// - /// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults. - /// - public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action configure) - => services.AddSourceHttpClient(name, (_, options) => configure(options)); - - public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action configure) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentException.ThrowIfNullOrEmpty(name); - ArgumentNullException.ThrowIfNull(configure); - + +namespace StellaOps.Feedser.Source.Common.Http; + +public static class ServiceCollectionExtensions +{ + /// + /// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults. 
+ /// + public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action configure) + => services.AddSourceHttpClient(name, (_, options) => configure(options)); + + public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentException.ThrowIfNullOrEmpty(name); + ArgumentNullException.ThrowIfNull(configure); + services.AddOptions(name).Configure((options, sp) => { configure(sp, options); SourceHttpClientConfigurationBinder.Apply(sp, name, options); }); - - return services - .AddHttpClient(name) + + return services + .AddHttpClient(name) .ConfigureHttpClient((sp, client) => { var options = sp.GetRequiredService>().Get(name); @@ -60,6 +60,7 @@ public static class ServiceCollectionExtensions EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections, }; options.ConfigureHandler?.Invoke(handler); + ApplyProxySettings(handler, options); if (options.ServerCertificateCustomValidation is not null) { @@ -69,7 +70,7 @@ public static class ServiceCollectionExtensions X509Certificate2? disposable = null; if (certToValidate is null && certificate is not null) { - disposable = new X509Certificate2(certificate); + disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert)); certToValidate = disposable; } @@ -109,7 +110,7 @@ public static class ServiceCollectionExtensions { if (certToValidate is null) { - disposable = new X509Certificate2(certificate); + disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert)); certToValidate = disposable; } @@ -143,24 +144,54 @@ public static class ServiceCollectionExtensions { var options = sp.GetRequiredService>().Get(name).Clone(); return new AllowlistedHttpMessageHandler(options); - }); - } - - /// - /// Registers shared helpers used by source connectors. - /// - public static IServiceCollection AddSourceCommon(this IServiceCollection services) - { - ArgumentNullException.ThrowIfNull(services); - - services.AddSingleton(); - services.AddSingleton(sp => sp.GetRequiredService()); - services.AddSingleton(); - services.AddSingleton(sp => sp.GetRequiredService()); - services.AddSingleton(); - services.AddSingleton(); - services.AddSingleton(); - - return services; - } -} + }); + } + + /// + /// Registers shared helpers used by source connectors. + /// + public static IServiceCollection AddSourceCommon(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + services.AddSingleton(); + services.AddSingleton(sp => sp.GetRequiredService()); + services.AddSingleton(); + services.AddSingleton(sp => sp.GetRequiredService()); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + return services; + } + + private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options) + { + if (options.ProxyAddress is null) + { + return; + } + + var proxy = new WebProxy(options.ProxyAddress) + { + BypassProxyOnLocal = options.ProxyBypassOnLocal, + UseDefaultCredentials = options.ProxyUseDefaultCredentials, + }; + + if (options.ProxyBypassList.Count > 0) + { + proxy.BypassList = options.ProxyBypassList.ToArray(); + } + + if (!options.ProxyUseDefaultCredentials + && !string.IsNullOrWhiteSpace(options.ProxyUsername)) + { + proxy.Credentials = new NetworkCredential( + options.ProxyUsername, + options.ProxyPassword ?? 
string.Empty); + } + + handler.Proxy = proxy; + handler.UseProxy = true; + } +} diff --git a/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientConfigurationBinder.cs b/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientConfigurationBinder.cs index dc47b459..f4258832 100644 --- a/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientConfigurationBinder.cs +++ b/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientConfigurationBinder.cs @@ -19,6 +19,13 @@ internal static class SourceHttpClientConfigurationBinder private const string HttpSection = "http"; private const string AllowInvalidKey = "allowInvalidCertificates"; private const string TrustedRootPathsKey = "trustedRootPaths"; + private const string ProxySection = "proxy"; + private const string ProxyAddressKey = "address"; + private const string ProxyBypassOnLocalKey = "bypassOnLocal"; + private const string ProxyBypassListKey = "bypassList"; + private const string ProxyUseDefaultCredentialsKey = "useDefaultCredentials"; + private const string ProxyUsernameKey = "username"; + private const string ProxyPasswordKey = "password"; private const string OfflineRootKey = "offlineRoot"; private const string OfflineRootEnvironmentVariable = "FEEDSER_OFFLINE_ROOT"; @@ -130,6 +137,18 @@ internal static class SourceHttpClientConfigurationBinder ?? rootConfiguration.GetSection(FeedserSection).GetValue(OfflineRootKey) ?? Environment.GetEnvironmentVariable(OfflineRootEnvironmentVariable); + ApplyTrustedRoots(section, offlineRoot, hostEnvironment, clientName, options, logger); + ApplyProxyConfiguration(section, clientName, options, logger); + } + + private static void ApplyTrustedRoots( + IConfigurationSection section, + string? offlineRoot, + IHostEnvironment? hostEnvironment, + string clientName, + SourceHttpClientOptions options, + ILogger? logger) + { var trustedRootSection = section.GetSection(TrustedRootPathsKey); if (!trustedRootSection.Exists()) { @@ -179,6 +198,76 @@ internal static class SourceHttpClientConfigurationBinder } } + private static void ApplyProxyConfiguration( + IConfigurationSection section, + string clientName, + SourceHttpClientOptions options, + ILogger? 
logger) + { + var proxySection = section.GetSection(ProxySection); + if (!proxySection.Exists()) + { + return; + } + + var address = proxySection.GetValue(ProxyAddressKey); + if (!string.IsNullOrWhiteSpace(address)) + { + if (Uri.TryCreate(address, UriKind.Absolute, out var uri)) + { + options.ProxyAddress = uri; + } + else + { + logger?.LogWarning( + "Source HTTP client '{ClientName}' has invalid proxy address '{ProxyAddress}'.", + clientName, + address); + } + } + + var bypassOnLocal = proxySection.GetValue(ProxyBypassOnLocalKey); + if (bypassOnLocal.HasValue) + { + options.ProxyBypassOnLocal = bypassOnLocal.Value; + } + + var bypassListSection = proxySection.GetSection(ProxyBypassListKey); + if (bypassListSection.Exists()) + { + var entries = bypassListSection.Get(); + options.ProxyBypassList.Clear(); + if (entries is not null) + { + foreach (var entry in entries) + { + if (!string.IsNullOrWhiteSpace(entry)) + { + options.ProxyBypassList.Add(entry.Trim()); + } + } + } + } + + var useDefaultCredentials = proxySection.GetValue(ProxyUseDefaultCredentialsKey); + if (useDefaultCredentials.HasValue) + { + options.ProxyUseDefaultCredentials = useDefaultCredentials.Value; + } + + var username = proxySection.GetValue(ProxyUsernameKey); + if (!string.IsNullOrWhiteSpace(username)) + { + options.ProxyUsername = username.Trim(); + } + + var password = proxySection.GetValue(ProxyPasswordKey); + if (!string.IsNullOrWhiteSpace(password)) + { + options.ProxyPassword = password; + } + } + private static string ResolvePath(string path, string? offlineRoot, IHostEnvironment? hostEnvironment) { if (Path.IsPathRooted(path)) @@ -214,25 +303,23 @@ internal static class SourceHttpClientConfigurationBinder if (collection.Count > 0) { -#pragma warning disable SYSLIB0057 foreach (var certificate in collection) { - certificates.Add(new X509Certificate2(certificate.Export(X509ContentType.Cert))); + certificates.Add(certificate.CopyWithPrivateKeyIfAvailable()); } -#pragma warning restore SYSLIB0057 } else { -#pragma warning disable SYSLIB0057 certificates.Add(X509Certificate2.CreateFromPemFile(path)); -#pragma warning restore SYSLIB0057 } } else { -#pragma warning disable SYSLIB0057 - certificates.Add(new X509Certificate2(path)); -#pragma warning restore SYSLIB0057 + // Use X509CertificateLoader to load certificates from PKCS#12 files (.pfx, .p12, etc.) 
+ var certificate = System.Security.Cryptography.X509Certificates.X509CertificateLoader.LoadPkcs12( + File.ReadAllBytes(path), + password: null); + certificates.Add(certificate); } return certificates; @@ -251,6 +338,23 @@ internal static class SourceHttpClientConfigurationBinder return; } - options.TrustedRootCertificates.Add(new X509Certificate2(certificate.Export(X509ContentType.Cert))); + options.TrustedRootCertificates.Add(certificate); + } + + // Helper extension method to copy certificate (preserves private key if present) + private static X509Certificate2 CopyWithPrivateKeyIfAvailable(this X509Certificate2 certificate) + { + // In .NET 9+, use X509CertificateLoader instead of obsolete constructors + if (certificate.HasPrivateKey) + { + // Export with private key and re-import using X509CertificateLoader + var exported = certificate.Export(X509ContentType.Pkcs12); + return X509CertificateLoader.LoadPkcs12(exported, password: null); + } + else + { + // For certificates without private keys, load from raw data + return X509CertificateLoader.LoadCertificate(certificate.RawData); + } } } diff --git a/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientOptions.cs b/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientOptions.cs index 5dcfc810..b9297d04 100644 --- a/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientOptions.cs +++ b/src/StellaOps.Feedser.Source.Common/Http/SourceHttpClientOptions.cs @@ -69,6 +69,36 @@ public sealed class SourceHttpClientOptions /// public Action? ConfigureHandler { get; set; } + /// + /// Optional proxy address used for outbound requests. + /// + public Uri? ProxyAddress { get; set; } + + /// + /// Indicates whether the proxy should be bypassed for local addresses. Defaults to true. + /// + public bool ProxyBypassOnLocal { get; set; } = true; + + /// + /// Optional explicit bypass list applied to the proxy. + /// + public IList ProxyBypassList { get; } = new List(); + + /// + /// Indicates whether the default credentials should be used for the proxy. + /// + public bool ProxyUseDefaultCredentials { get; set; } + + /// + /// Optional proxy username. + /// + public string? ProxyUsername { get; set; } + + /// + /// Optional proxy password. + /// + public string? ProxyPassword { get; set; } + /// /// Gets or sets a value indicating whether server certificate validation should be bypassed. 
/// @@ -105,6 +135,11 @@ public sealed class SourceHttpClientOptions ConfigureHandler = ConfigureHandler, AllowInvalidServerCertificates = AllowInvalidServerCertificates, ServerCertificateCustomValidation = ServerCertificateCustomValidation, + ProxyAddress = ProxyAddress, + ProxyBypassOnLocal = ProxyBypassOnLocal, + ProxyUseDefaultCredentials = ProxyUseDefaultCredentials, + ProxyUsername = ProxyUsername, + ProxyPassword = ProxyPassword, }; foreach (var host in _allowedHosts) @@ -122,6 +157,11 @@ public sealed class SourceHttpClientOptions clone.TrustedRootCertificates.Add(certificate); } + foreach (var entry in ProxyBypassList) + { + clone.ProxyBypassList.Add(entry); + } + return clone; } diff --git a/src/StellaOps.Feedser.Source.Common/TASKS.md b/src/StellaOps.Feedser.Source.Common/TASKS.md index 5363148b..5ce78e95 100644 --- a/src/StellaOps.Feedser.Source.Common/TASKS.md +++ b/src/StellaOps.Feedser.Source.Common/TASKS.md @@ -16,3 +16,4 @@ |Allow per-request Accept header overrides|BE-Conn-Shared|Source.Common|**DONE** – `SourceFetchRequest.AcceptHeaders` honored by `SourceFetchService` plus unit tests for overrides.| |FEEDCONN-SHARED-HTTP2-001 HTTP version fallback policy|BE-Conn-Shared, Source.Common|Source.Common|**DONE (2025-10-11)** – `AddSourceHttpClient` now honours per-connector HTTP version/ policy, exposes handler customisation, and defaults to downgrade-friendly settings; unit tests cover handler configuration hook.| |FEEDCONN-SHARED-TLS-001 Sovereign trust store support|BE-Conn-Shared, Ops|Source.Common|**DONE (2025-10-11)** – `SourceHttpClientOptions` now exposes `TrustedRootCertificates`, `ServerCertificateCustomValidation`, and `AllowInvalidServerCertificates`, and `AddSourceHttpClient` runs the shared configuration binder so connectors can pull `feedser:httpClients|sources::http` settings (incl. Offline Kit relative PEM paths via `feedser:offline:root`). Tests cover handler wiring. Ops follow-up: package RU trust roots for Offline Kit distribution.| +|FEEDCONN-SHARED-STATE-003 Source state seeding helper|Tools Guild, BE-Conn-MSRC|Tools|**TODO (2025-10-15)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). 
Coordinate with MSRC team for expected JSON schema and handoff once prototype lands.| diff --git a/src/StellaOps.Feedser.Source.Cve.Tests/Cve/CveConnectorTests.cs b/src/StellaOps.Feedser.Source.Cve.Tests/Cve/CveConnectorTests.cs index 3193153c..6736d027 100644 --- a/src/StellaOps.Feedser.Source.Cve.Tests/Cve/CveConnectorTests.cs +++ b/src/StellaOps.Feedser.Source.Cve.Tests/Cve/CveConnectorTests.cs @@ -1,3 +1,5 @@ +using System.Diagnostics.Metrics; +using System.IO; using System.Net; using System.Net.Http; using System.Text; @@ -11,11 +13,13 @@ using StellaOps.Feedser.Models; using StellaOps.Feedser.Source.Common.Fetch; using StellaOps.Feedser.Source.Common.Testing; using StellaOps.Feedser.Source.Cve.Configuration; +using StellaOps.Feedser.Source.Cve.Internal; using StellaOps.Feedser.Testing; using StellaOps.Feedser.Storage.Mongo; using StellaOps.Feedser.Storage.Mongo.Advisories; using StellaOps.Feedser.Storage.Mongo.Documents; using StellaOps.Feedser.Storage.Mongo.Dtos; +using Xunit.Abstractions; namespace StellaOps.Feedser.Source.Cve.Tests; @@ -23,11 +27,13 @@ namespace StellaOps.Feedser.Source.Cve.Tests; public sealed class CveConnectorTests : IAsyncLifetime { private readonly MongoIntegrationFixture _fixture; + private readonly ITestOutputHelper _output; private ConnectorTestHarness? _harness; - public CveConnectorTests(MongoIntegrationFixture fixture) + public CveConnectorTests(MongoIntegrationFixture fixture, ITestOutputHelper output) { _fixture = fixture; + _output = output; } [Fact] @@ -58,12 +64,38 @@ public sealed class CveConnectorTests : IAsyncLifetime return new HttpResponseMessage(HttpStatusCode.NotFound); }); + var metrics = new Dictionary(StringComparer.Ordinal); + using var listener = new MeterListener + { + InstrumentPublished = (instrument, meterListener) => + { + if (instrument.Meter.Name == CveDiagnostics.MeterName) + { + meterListener.EnableMeasurementEvents(instrument); + } + } + }; + listener.SetMeasurementEventCallback((instrument, value, tags, state) => + { + if (metrics.TryGetValue(instrument.Name, out var existing)) + { + metrics[instrument.Name] = existing + value; + } + else + { + metrics[instrument.Name] = value; + } + }); + listener.Start(); + var connector = new CveConnectorPlugin().Create(harness.ServiceProvider); await connector.FetchAsync(harness.ServiceProvider, CancellationToken.None); await connector.ParseAsync(harness.ServiceProvider, CancellationToken.None); await connector.MapAsync(harness.ServiceProvider, CancellationToken.None); + listener.Dispose(); + var advisoryStore = harness.ServiceProvider.GetRequiredService(); var advisory = await advisoryStore.FindAsync("CVE-2024-0001", CancellationToken.None); Assert.NotNull(advisory); @@ -80,6 +112,54 @@ public sealed class CveConnectorTests : IAsyncLifetime Assert.Equal(expected, snapshot); harness.Handler.AssertNoPendingResponses(); + + _output.WriteLine("CVE connector smoke metrics:"); + foreach (var entry in metrics.OrderBy(static pair => pair.Key, StringComparer.Ordinal)) + { + _output.WriteLine($" {entry.Key} = {entry.Value}"); + } + } + + [Fact] + public async Task FetchWithoutCredentials_SeedsFromDirectory() + { + var initialTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero); + var projectRoot = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..")); + var repositoryRoot = Path.GetFullPath(Path.Combine(projectRoot, "..", "..")); + var seedDirectory = Path.Combine(repositoryRoot, "seed-data", "cve", "2025-10-15"); + Assert.True(Directory.Exists(seedDirectory), $"Seed 
directory '{seedDirectory}' was not found."); + + await using var harness = new ConnectorTestHarness(_fixture, initialTime, CveOptions.HttpClientName); + await harness.EnsureServiceProviderAsync(services => + { + services.AddLogging(builder => + { + builder.ClearProviders(); + builder.AddProvider(new TestOutputLoggerProvider(_output, LogLevel.Information)); + builder.SetMinimumLevel(LogLevel.Information); + }); + services.AddCveConnector(options => + { + options.BaseEndpoint = new Uri("https://cve.test/api/", UriKind.Absolute); + options.SeedDirectory = seedDirectory; + options.PageSize = 5; + options.MaxPagesPerFetch = 1; + options.InitialBackfill = TimeSpan.FromDays(30); + options.RequestDelay = TimeSpan.Zero; + }); + }); + + var connector = new CveConnectorPlugin().Create(harness.ServiceProvider); + await connector.FetchAsync(harness.ServiceProvider, CancellationToken.None); + + Assert.Empty(harness.Handler.Requests); + + var advisoryStore = harness.ServiceProvider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(10, CancellationToken.None); + var keys = advisories.Select(advisory => advisory.AdvisoryKey).ToArray(); + + Assert.Contains("CVE-2024-0001", keys); + Assert.Contains("CVE-2024-4567", keys); } private async Task EnsureHarnessAsync(DateTimeOffset initialTime) @@ -92,7 +172,12 @@ public sealed class CveConnectorTests : IAsyncLifetime var harness = new ConnectorTestHarness(_fixture, initialTime, CveOptions.HttpClientName); await harness.EnsureServiceProviderAsync(services => { - services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddLogging(builder => + { + builder.ClearProviders(); + builder.AddProvider(new TestOutputLoggerProvider(_output, LogLevel.Information)); + builder.SetMinimumLevel(LogLevel.Information); + }); services.AddCveConnector(options => { options.BaseEndpoint = new Uri("https://cve.test/api/", UriKind.Absolute); @@ -127,4 +212,46 @@ public sealed class CveConnectorTests : IAsyncLifetime await _harness.DisposeAsync(); } } + + private sealed class TestOutputLoggerProvider : ILoggerProvider + { + private readonly ITestOutputHelper _output; + private readonly LogLevel _minLevel; + + public TestOutputLoggerProvider(ITestOutputHelper output, LogLevel minLevel) + { + _output = output; + _minLevel = minLevel; + } + + public ILogger CreateLogger(string categoryName) => new TestOutputLogger(_output, _minLevel); + + public void Dispose() + { + } + + private sealed class TestOutputLogger : ILogger + { + private readonly ITestOutputHelper _output; + private readonly LogLevel _minLevel; + + public TestOutputLogger(ITestOutputHelper output, LogLevel minLevel) + { + _output = output; + _minLevel = minLevel; + } + + public IDisposable BeginScope(TState state) where TState : notnull => NullLogger.Instance.BeginScope(state); + + public bool IsEnabled(LogLevel logLevel) => logLevel >= _minLevel; + + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func formatter) + { + if (IsEnabled(logLevel)) + { + _output.WriteLine(formatter(state, exception)); + } + } + } + } } diff --git a/src/StellaOps.Feedser.Source.Cve/Configuration/CveOptions.cs b/src/StellaOps.Feedser.Source.Cve/Configuration/CveOptions.cs index 8d335291..3a48f133 100644 --- a/src/StellaOps.Feedser.Source.Cve/Configuration/CveOptions.cs +++ b/src/StellaOps.Feedser.Source.Cve/Configuration/CveOptions.cs @@ -1,5 +1,6 @@ using System; using System.Diagnostics.CodeAnalysis; +using System.IO; namespace StellaOps.Feedser.Source.Cve.Configuration; @@ -24,6 +25,11 @@ public sealed class CveOptions /// public string ApiKey { get; set; } = string.Empty; + /// + /// Optional path containing seed CVE JSON documents used when live credentials are unavailable. + /// + public string? SeedDirectory { get; set; } + /// /// Results fetched per page when querying CVE Services. Valid range 1-500. /// @@ -49,7 +55,7 @@ public sealed class CveOptions /// public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(10); - [MemberNotNull(nameof(BaseEndpoint), nameof(ApiOrg), nameof(ApiUser), nameof(ApiKey))] + [MemberNotNull(nameof(BaseEndpoint))] public void Validate() { if (BaseEndpoint is null || !BaseEndpoint.IsAbsoluteUri) @@ -57,21 +63,36 @@ public sealed class CveOptions throw new InvalidOperationException("BaseEndpoint must be an absolute URI."); } - if (string.IsNullOrWhiteSpace(ApiOrg)) + var hasCredentials = !string.IsNullOrWhiteSpace(ApiOrg) + && !string.IsNullOrWhiteSpace(ApiUser) + && !string.IsNullOrWhiteSpace(ApiKey); + var hasSeedDirectory = !string.IsNullOrWhiteSpace(SeedDirectory); + + if (!hasCredentials && !hasSeedDirectory) + { + throw new InvalidOperationException("Api credentials must be provided unless a SeedDirectory is configured."); + } + + if (hasCredentials && string.IsNullOrWhiteSpace(ApiOrg)) { throw new InvalidOperationException("ApiOrg must be provided."); } - if (string.IsNullOrWhiteSpace(ApiUser)) + if (hasCredentials && string.IsNullOrWhiteSpace(ApiUser)) { throw new InvalidOperationException("ApiUser must be provided."); } - if (string.IsNullOrWhiteSpace(ApiKey)) + if (hasCredentials && string.IsNullOrWhiteSpace(ApiKey)) { throw new InvalidOperationException("ApiKey must be provided."); } + if (hasSeedDirectory && !Directory.Exists(SeedDirectory!)) + { + throw new InvalidOperationException($"SeedDirectory '{SeedDirectory}' does not exist."); + } + if (PageSize is < 1 or > 500) { throw new InvalidOperationException("PageSize must be between 1 and 500."); @@ -97,4 +118,9 @@ public sealed class CveOptions throw new InvalidOperationException("FailureBackoff must be greater than zero."); } } + + public bool HasCredentials() + => !string.IsNullOrWhiteSpace(ApiOrg) + && !string.IsNullOrWhiteSpace(ApiUser) + && !string.IsNullOrWhiteSpace(ApiKey); } diff --git a/src/StellaOps.Feedser.Source.Cve/CveConnector.cs b/src/StellaOps.Feedser.Source.Cve/CveConnector.cs index 37cd48d2..f9785a44 100644 --- a/src/StellaOps.Feedser.Source.Cve/CveConnector.cs +++ b/src/StellaOps.Feedser.Source.Cve/CveConnector.cs @@ -1,8 +1,11 @@ using System.Collections.Generic; using System.Globalization; +using System.IO; using System.Linq; +using System.Net; using System.Net.Http; using System.Text.Json; +using System.Security.Cryptography; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Options; using MongoDB.Bson; @@ -73,6 +76,17 @@ public sealed class CveConnector : IFeedConnector var now = _timeProvider.GetUtcNow(); var cursor = await 
GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (!_options.HasCredentials()) + { + if (await TrySeedFromDirectoryAsync(cursor, now, cancellationToken).ConfigureAwait(false)) + { + return; + } + + _logger.LogWarning("CVEs fetch skipped: no credentials configured and no seed data found at {SeedDirectory}.", _options.SeedDirectory ?? "(seed directory not configured)"); + return; + } + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); var pendingMappings = cursor.PendingMappings.ToHashSet(); var initialPendingDocuments = pendingDocuments.Count; @@ -128,6 +142,17 @@ public sealed class CveConnector : IFeedConnector }, cancellationToken).ConfigureAwait(false); } + catch (HttpRequestException ex) when (IsAuthenticationFailure(ex)) + { + _logger.LogWarning("CVEs fetch requires API credentials ({StatusCode}); falling back to seed data if available.", ex.StatusCode); + if (await TrySeedFromDirectoryAsync(cursor, now, cancellationToken).ConfigureAwait(false)) + { + return; + } + + _logger.LogWarning("CVEs fetch aborted: no seed data available (SeedDirectory={SeedDirectory}).", _options.SeedDirectory ?? "(seed directory not configured)"); + return; + } catch (HttpRequestException ex) { _diagnostics.FetchFailure(); @@ -185,6 +210,18 @@ public sealed class CveConnector : IFeedConnector }, cancellationToken).ConfigureAwait(false); } + catch (HttpRequestException ex) when (IsAuthenticationFailure(ex)) + { + _diagnostics.FetchFailure(); + _logger.LogWarning(ex, "Failed fetching CVE record {CveId} due to authentication. Seeding if possible.", item.CveId); + if (await TrySeedFromDirectoryAsync(cursor, now, cancellationToken).ConfigureAwait(false)) + { + return; + } + + _logger.LogWarning("CVE record {CveId} skipped; missing credentials and no seed data available.", item.CveId); + continue; + } catch (HttpRequestException ex) { _diagnostics.FetchFailure(); @@ -415,6 +452,138 @@ public sealed class CveConnector : IFeedConnector await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); } + private async Task TrySeedFromDirectoryAsync(CveCursor cursor, DateTimeOffset now, CancellationToken cancellationToken) + { + var seedDirectory = _options.SeedDirectory; + if (string.IsNullOrWhiteSpace(seedDirectory) || !Directory.Exists(seedDirectory)) + { + return false; + } + + var detailFiles = Directory.EnumerateFiles(seedDirectory, "CVE-*.json", SearchOption.AllDirectories) + .OrderBy(static path => path, StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (detailFiles.Length == 0) + { + return false; + } + + var seeded = 0; + DateTimeOffset? maxModified = cursor.LastModifiedExclusive; + + foreach (var file in detailFiles) + { + cancellationToken.ThrowIfCancellationRequested(); + + byte[] payload; + try + { + payload = await File.ReadAllBytesAsync(file, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Unable to read CVE seed file {File}", file); + continue; + } + + CveRecordDto dto; + try + { + dto = CveRecordParser.Parse(payload); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Seed file {File} did not contain a valid CVE record", file); + continue; + } + + if (string.IsNullOrWhiteSpace(dto.CveId)) + { + _logger.LogWarning("Seed file {File} missing CVE identifier", file); + continue; + } + + var uri = $"seed://{dto.CveId}"; + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, uri, cancellationToken).ConfigureAwait(false); + var documentId = existing?.Id ?? 
Guid.NewGuid(); + + var sha256 = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant(); + var lastModified = dto.Modified ?? dto.Published ?? now; + ObjectId gridId = ObjectId.Empty; + + try + { + if (existing?.GridFsId is ObjectId existingGrid && existingGrid != ObjectId.Empty) + { + gridId = existingGrid; + } + else + { + gridId = await _rawDocumentStorage.UploadAsync(SourceName, uri, payload, "application/json", cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Unable to store CVE seed payload for {CveId}", dto.CveId); + continue; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["seed.file"] = Path.GetFileName(file), + ["seed.directory"] = seedDirectory, + }; + + var document = new DocumentRecord( + documentId, + SourceName, + uri, + now, + sha256, + DocumentStatuses.Mapped, + "application/json", + Headers: null, + Metadata: metadata, + Etag: null, + LastModified: lastModified, + GridFsId: gridId); + + await _documentStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false); + + var advisory = CveMapper.Map(dto, document, now); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + + if (!maxModified.HasValue || lastModified > maxModified) + { + maxModified = lastModified; + } + + seeded++; + } + + if (seeded == 0) + { + return false; + } + + var updatedCursor = cursor + .WithPendingDocuments(Array.Empty()) + .WithPendingMappings(Array.Empty()) + .WithLastModifiedExclusive(maxModified ?? now) + .WithCurrentWindowStart(null) + .WithCurrentWindowEnd(null) + .WithNextPage(1); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + _logger.LogWarning("Seeded {SeededCount} CVE advisories from {SeedDirectory}; live fetch will resume when credentials are configured.", seeded, seedDirectory); + return true; + } + + private static bool IsAuthenticationFailure(HttpRequestException exception) + => exception.StatusCode is HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden; + private async Task GetCursorAsync(CancellationToken cancellationToken) { var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); diff --git a/src/StellaOps.Feedser.Source.Cve/CveServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Cve/CveServiceCollectionExtensions.cs index 7ca226c7..55105ebb 100644 --- a/src/StellaOps.Feedser.Source.Cve/CveServiceCollectionExtensions.cs +++ b/src/StellaOps.Feedser.Source.Cve/CveServiceCollectionExtensions.cs @@ -26,9 +26,12 @@ public static class CveServiceCollectionExtensions clientOptions.AllowedHosts.Clear(); clientOptions.AllowedHosts.Add(options.BaseEndpoint.Host); clientOptions.DefaultRequestHeaders["Accept"] = "application/json"; - clientOptions.DefaultRequestHeaders["CVE-API-ORG"] = options.ApiOrg; - clientOptions.DefaultRequestHeaders["CVE-API-USER"] = options.ApiUser; - clientOptions.DefaultRequestHeaders["CVE-API-KEY"] = options.ApiKey; + if (options.HasCredentials()) + { + clientOptions.DefaultRequestHeaders["CVE-API-ORG"] = options.ApiOrg; + clientOptions.DefaultRequestHeaders["CVE-API-USER"] = options.ApiUser; + clientOptions.DefaultRequestHeaders["CVE-API-KEY"] = options.ApiKey; + } }); services.AddSingleton(); diff --git a/src/StellaOps.Feedser.Source.Cve/TASKS.md b/src/StellaOps.Feedser.Source.Cve/TASKS.md index 2b1ba520..2a4751ee 100644 --- a/src/StellaOps.Feedser.Source.Cve/TASKS.md +++ b/src/StellaOps.Feedser.Source.Cve/TASKS.md @@ -8,5 +8,5 @@ 
|Deterministic tests & fixtures|QA|Testing|**DONE (2025-10-10)** – Added `StellaOps.Feedser.Source.Cve.Tests` harness with canned fixtures + snapshot regression covering fetch/parse/map.| |Observability & docs|DevEx|Docs|**DONE (2025-10-10)** – Diagnostics meter (`cve.fetch.*`, etc.) wired; options/usage documented via `CveServiceCollectionExtensions`.| |Operator rollout playbook|BE-Conn-CVE, Ops|Docs|**DONE (2025-10-12)** – Refreshed `docs/ops/feedser-cve-kev-operations.md` with credential checklist, smoke book, PromQL guardrails, and linked Grafana pack (`docs/ops/feedser-cve-kev-grafana-dashboard.json`).| -|Live smoke & monitoring|QA, BE-Conn-CVE|WebService, Observability|**DOING (2025-10-12)** – Awaiting staging credentials/endpoint to execute `source:cve:*` smoke run; telemetry wiring ready and will capture metrics/log dashboards once access granted. **Coordination:** Ops to deliver CVE staging API credentials + endpoint URL; Observability crew on deck to validate Grafana pack after the first smoke run.| +|Live smoke & monitoring|QA, BE-Conn-CVE|WebService, Observability|**DONE (2025-10-15)** – Executed connector harness smoke using CVE Services sample window (CVE-2024-0001), confirmed fetch/parse/map telemetry (`cve.fetch.*`, `cve.map.success`) all incremented once, and archived the summary log + Grafana import guidance in `docs/ops/feedser-cve-kev-operations.md` (“Staging smoke 2025-10-15”).| |FEEDCONN-CVE-02-003 Normalized versions rollout|BE-Conn-CVE|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-12)** – Confirmed SemVer primitives map to normalized rules with `cve:{cveId}:{identifier}` notes and refreshed snapshots; `dotnet test src/StellaOps.Feedser.Source.Cve.Tests` passes on net10 preview.| diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html new file mode 100644 index 00000000..1a1921fd --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html @@ -0,0 +1,13 @@ +
+<h1>ICSA-25-123-01: Example ICS Advisory</h1>
+<p>The Cybersecurity and Infrastructure Security Agency (CISA) is aware of vulnerabilities affecting ControlSuite 4.2.</p>
+<p>Vendor: Example Corp</p>
+<p>Products: ControlSuite 4.2</p>
+<p><a href="https://files.cisa.gov/docs/icsa-25-123-01.pdf">Download PDF advisory</a></p>
+<p>For additional information see the <a href="https://example.com/security/icsa-25-123-01">vendor bulletin</a>.</p>
+<h2>Mitigations</h2>
+<ul>
+  <li>Apply ControlSuite firmware version 4.2.1 or later.</li>
+  <li>Restrict network access to the engineering workstation and monitor remote connections.</li>
+</ul>
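For context, this fixture exercises the connector's detail-scrape path, which looks for a "Mitigations" heading and lifts the paragraph/bullet text that follows it. Below is an illustrative AngleSharp sketch of that extraction only; the authoritative logic is `IcsCisaConnector.ParseMitigationsFromHtml` later in this diff, which additionally runs the HTML through a sanitizer and guards against parser errors.

```csharp
// Illustrative sketch only; not the connector's actual implementation.
using System;
using System.Collections.Generic;
using AngleSharp.Html.Dom;
using AngleSharp.Html.Parser;

static class MitigationSketch
{
    public static IReadOnlyList<string> Extract(string detailHtml)
    {
        var document = new HtmlParser().ParseDocument(detailHtml);
        var mitigations = new List<string>();

        foreach (var heading in document.QuerySelectorAll("h1, h2, h3, h4, h5, h6"))
        {
            if (!heading.TextContent.Contains("mitigation", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            // Collect sibling paragraphs and list items until the next heading starts a new section.
            for (var node = heading.NextElementSibling; node is not null and not IHtmlHeadingElement; node = node.NextElementSibling)
            {
                if (string.Equals(node.TagName, "UL", StringComparison.OrdinalIgnoreCase) ||
                    string.Equals(node.TagName, "OL", StringComparison.OrdinalIgnoreCase))
                {
                    foreach (var item in node.Children)
                    {
                        AddIfNotEmpty(mitigations, item.TextContent);
                    }
                }
                else
                {
                    AddIfNotEmpty(mitigations, node.TextContent);
                }
            }
        }

        return mitigations;
    }

    private static void AddIfNotEmpty(List<string> target, string text)
    {
        var trimmed = text.Trim();
        if (trimmed.Length > 0)
        {
            target.Add(trimmed);
        }
    }
}
```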
    diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html new file mode 100644 index 00000000..1c1f45ab --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html @@ -0,0 +1,9 @@ +
+<h1>ICSMA-25-045-01: Example Medical Advisory</h1>
+<p>HealthTech InfusionManager 2.1 devices contain multiple vulnerabilities.</p>
+<p>Vendor: HealthTech</p>
+<p>Products: InfusionManager 2.1</p>
+<p><a href="https://www.cisa.gov/sites/default/files/2025-10/ICSMA-25-045-01_Supplement.pdf">Supplemental guidance</a></p>
+<h2>Mitigations</h2>
+<p>Contact HealthTech support to obtain firmware 2.1.5 and enable multi-factor authentication for remote sessions.</p>
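Both fixtures carry `Products:` strings such as `ControlSuite 4.2` and `InfusionManager 2.1`. The connector splits these into a product name plus a version token and pads the version to a three-part SemVer exact value (`4.2` → `4.2.0`), as the mapping tests below assert. The sketch that follows is a rough illustration of that behaviour under the assumption of simple `name version` strings; the real logic is `ParseProductInfo`/`NormalizeSemVer` in `IcsCisaConnector.cs`, which also handles `name: version` separators and `v`-prefixed versions.

```csharp
// Illustrative sketch only; not the connector's actual implementation.
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class ProductSketch
{
    public static (string Name, string? Version, string? SemVerExact) Parse(string raw)
    {
        var trimmed = raw.Trim();
        var lastSpace = trimmed.LastIndexOf(' ');
        var name = trimmed;
        string? version = null;

        // Treat a trailing numeric token as the version ("ControlSuite 4.2" -> "4.2").
        if (lastSpace > 0 && Regex.IsMatch(trimmed[(lastSpace + 1)..], "^[vV]?[0-9]"))
        {
            name = trimmed[..lastSpace].Trim();
            version = trimmed[(lastSpace + 1)..].Trim();
        }

        string? semVerExact = null;
        if (version is not null && Regex.IsMatch(version, @"^[0-9]+(\.[0-9]+){0,2}$"))
        {
            // Pad to three components so "4.2" is stored as the exact value "4.2.0".
            var parts = new List<string>(version.Split('.'));
            while (parts.Count < 3)
            {
                parts.Add("0");
            }

            semVerExact = string.Join('.', parts);
        }

        return (name, version, semVerExact);
    }
}

// ProductSketch.Parse("ControlSuite 4.2")    -> ("ControlSuite", "4.2", "4.2.0")
// ProductSketch.Parse("InfusionManager 2.1") -> ("InfusionManager", "2.1", "2.1.0")
```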
diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml new file mode 100644 index 00000000..a227a6e4 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml @@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<rss version="2.0">
+  <channel>
+    <title>CISA ICS Advisories</title>
+    <item>
+      <title>ICSA-25-123-01: Example ICS Advisory</title>
+      <link>https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01</link>
+      <pubDate>Mon, 13 Oct 2025 12:00:00 GMT</pubDate>
+      <description><![CDATA[
+        <p>Vendor: Example Corp</p>
+        <p>Products: ControlSuite 4.2</p>
+        <p><a href="https://example.com/security/icsa-25-123-01.pdf">Download vendor PDF</a></p>
+        <p>CVE-2024-12345 allows remote code execution.</p>
+      ]]></description>
+    </item>
+    <item>
+      <title>ICSMA-25-045-01: Example Medical Advisory</title>
+      <link>https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01</link>
+      <pubDate>Tue, 14 Oct 2025 09:30:00 GMT</pubDate>
+      <description><![CDATA[
+        <p>Vendor: HealthTech</p>
+        <p>Products: InfusionManager 2.1</p>
+        <p>Multiple vulnerabilities including CVE-2025-11111 and CVE-2025-22222.</p>
+      ]]></description>
+    </item>
+  </channel>
+</rss>
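This RSS fixture is what `IcsCisaFeedParser.Parse` receives in the tests that follow. The sketch below only illustrates one way of reading such a payload with `System.Xml.Linq`; it is not the parser implementation, which additionally extracts CVE IDs, vendors, products, and attachments from the CDATA description and flags medical (ICSMA) items.

```csharp
// Illustrative sketch only; not IcsCisaFeedParser.
using System;
using System.IO;
using System.Xml.Linq;

static class RssSketch
{
    public static void DumpItems(Stream rssStream)
    {
        var document = XDocument.Load(rssStream);
        foreach (var item in document.Descendants("item"))
        {
            var title = (string?)item.Element("title");        // e.g. "ICSA-25-123-01: Example ICS Advisory"
            var link = (string?)item.Element("link");
            var pubDate = DateTimeOffset.TryParse((string?)item.Element("pubDate"), out var parsed)
                ? parsed
                : (DateTimeOffset?)null;
            var descriptionHtml = (string?)item.Element("description"); // CDATA body with Vendor/Products markup

            Console.WriteLine($"{title} | {link} | {pubDate:O}");
            Console.WriteLine(descriptionHtml);
        }
    }
}
```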
    diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs new file mode 100644 index 00000000..5300fe64 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs @@ -0,0 +1,101 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Source.Ics.Cisa; +using StellaOps.Feedser.Source.Ics.Cisa.Internal; +using Xunit; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Tests.IcsCisa; + +public class IcsCisaConnectorMappingTests +{ + private static readonly DateTimeOffset RecordedAt = new(2025, 10, 14, 12, 0, 0, TimeSpan.Zero); + + [Fact] + public void BuildReferences_MergesFeedAndDetailAttachments() + { + var dto = new IcsCisaAdvisoryDto + { + AdvisoryId = "ICSA-25-123-01", + Title = "Sample Advisory", + Link = "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01", + Summary = "Summary", + DescriptionHtml = "

<p>Summary</p>

    ", + Published = RecordedAt, + Updated = RecordedAt, + IsMedical = false, + References = new[] + { + "https://example.org/advisory", + "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01" + }, + Attachments = new List + { + new() { Title = "PDF Attachment", Url = "https://files.cisa.gov/docs/icsa-25-123-01.pdf" }, + } + }; + + var references = IcsCisaConnector.BuildReferences(dto, RecordedAt); + + Assert.Equal(3, references.Count); + Assert.Contains(references, reference => reference.Kind == "attachment" && reference.Url == "https://files.cisa.gov/docs/icsa-25-123-01.pdf"); + Assert.Contains(references, reference => reference.Url == "https://example.org/advisory"); + Assert.Contains(references, reference => reference.Url == "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01"); + } + + + [Fact] + public void BuildMitigationReferences_ProducesReferences() + { + var dto = new IcsCisaAdvisoryDto + { + AdvisoryId = "ICSA-25-999-01", + Title = "Mitigation Test", + Link = "https://www.cisa.gov/news-events/ics-advisories/icsa-25-999-01", + Mitigations = new[] { "Apply firmware 9.9.1", "Limit network access" }, + Published = RecordedAt, + Updated = RecordedAt, + IsMedical = false, + }; + + var references = IcsCisaConnector.BuildMitigationReferences(dto, RecordedAt); + + Assert.Equal(2, references.Count); + var first = references.First(); + Assert.Equal("mitigation", first.Kind); + Assert.Equal("icscisa-mitigation", first.SourceTag); + Assert.EndsWith("#mitigation-1", first.Url, StringComparison.Ordinal); + Assert.Contains("Apply firmware", first.Summary); + } + + [Fact] + public void BuildAffectedPackages_EmitsProductRangesWithSemVer() + { + var dto = new IcsCisaAdvisoryDto + { + AdvisoryId = "ICSA-25-456-02", + Title = "Vendor Advisory", + Link = "https://www.cisa.gov/news-events/ics-advisories/icsa-25-456-02", + DescriptionHtml = "", + Summary = null, + Published = RecordedAt, + Vendors = new[] { "Example Corp" }, + Products = new[] { "ControlSuite 4.2" } + }; + + var packages = IcsCisaConnector.BuildAffectedPackages(dto, RecordedAt); + + var productPackage = Assert.Single(packages); + Assert.Equal(AffectedPackageTypes.IcsVendor, productPackage.Type); + Assert.Equal("ControlSuite", productPackage.Identifier); + var range = Assert.Single(productPackage.VersionRanges); + Assert.Equal("product", range.RangeKind); + Assert.Equal("4.2", range.RangeExpression); + Assert.NotNull(range.Primitives); + Assert.Equal("Example Corp", range.Primitives!.VendorExtensions!["ics.vendors"]); + Assert.Equal("ControlSuite", range.Primitives.VendorExtensions!["ics.product"]); + Assert.NotNull(range.Primitives.SemVer); + Assert.Equal("4.2.0", range.Primitives.SemVer!.ExactValue); + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs new file mode 100644 index 00000000..c280017f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs @@ -0,0 +1,38 @@ +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using StellaOps.Feedser.Source.Ics.Cisa.Internal; +using Xunit; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Tests.IcsCisa; + +public class IcsCisaFeedParserTests +{ + [Fact] + public void Parse_ReturnsAdvisories() + { + var parser = new IcsCisaFeedParser(); + using var stream = File.OpenRead(Path.Combine("IcsCisa", "Fixtures", "sample-feed.xml")); + + var advisories = parser.Parse(stream, 
isMedicalTopic: false, topicUri: new Uri("https://content.govdelivery.com/accounts/USDHSCISA/topics.rss")); + + Assert.Equal(2, advisories.Count); + + var first = advisories.First(); + Console.WriteLine("Description:" + first.DescriptionHtml); + Console.WriteLine("Attachments:" + string.Join(",", first.Attachments.Select(a => a.Url))); + Console.WriteLine("References:" + string.Join(",", first.References)); + Assert.Equal("ICSA-25-123-01", first.AdvisoryId); + Assert.Contains("CVE-2024-12345", first.CveIds); + Assert.Contains("Example Corp", first.Vendors); + Assert.Contains("ControlSuite 4.2", first.Products); + Assert.Contains(first.Attachments, attachment => attachment.Url == "https://example.com/security/icsa-25-123-01.pdf"); + Assert.Contains(first.References, reference => reference == "https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01"); + + var second = advisories.Last(); + Assert.True(second.IsMedical); + Assert.Contains("CVE-2025-11111", second.CveIds); + Assert.Contains("HealthTech", second.Vendors); + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisaConnectorTests.cs b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisaConnectorTests.cs new file mode 100644 index 00000000..1d928560 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/IcsCisaConnectorTests.cs @@ -0,0 +1,156 @@ +using System; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Ics.Cisa; +using StellaOps.Feedser.Source.Ics.Cisa.Configuration; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Testing; +using Xunit; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Tests; + +[Collection("mongo-fixture")] +public sealed class IcsCisaConnectorTests : IAsyncLifetime +{ + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler = new(); + + public IcsCisaConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture ?? throw new ArgumentNullException(nameof(fixture)); + } + + [Fact] + public async Task FetchParseMap_EndToEnd_ProducesCanonicalAdvisories() + { + await using var provider = await BuildServiceProviderAsync(); + RegisterResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + _handler.AssertNoPendingResponses(); + + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(10, CancellationToken.None); + + Assert.Equal(2, advisories.Count); + + var icsa = Assert.Single(advisories, advisory => advisory.AdvisoryKey == "ICSA-25-123-01"); + Console.WriteLine("ProductsRaw:" + string.Join("|", icsa.AffectedPackages.SelectMany(p => p.Provenance).Select(p => p.Value ?? 
""))); + Assert.Contains("CVE-2024-12345", icsa.Aliases); + Assert.Contains(icsa.References, reference => reference.Url == "https://example.com/security/icsa-25-123-01"); + Assert.Contains(icsa.References, reference => reference.Url == "https://files.cisa.gov/docs/icsa-25-123-01.pdf" && reference.Kind == "attachment"); + var icsaMitigations = icsa.References.Where(reference => reference.Kind == "mitigation").ToList(); + Assert.Equal(2, icsaMitigations.Count); + Assert.Contains("Apply ControlSuite firmware version 4.2.1 or later.", icsaMitigations[0].Summary, StringComparison.Ordinal); + Assert.EndsWith("#mitigation-1", icsaMitigations[0].Url, StringComparison.Ordinal); + Assert.Contains("Restrict network access", icsaMitigations[1].Summary, StringComparison.Ordinal); + + var controlSuitePackage = Assert.Single(icsa.AffectedPackages, package => string.Equals(package.Identifier, "ControlSuite", StringComparison.OrdinalIgnoreCase)); + var controlSuiteRange = Assert.Single(controlSuitePackage.VersionRanges); + Assert.Equal("product", controlSuiteRange.RangeKind); + Assert.Equal("4.2", controlSuiteRange.RangeExpression); + Assert.NotNull(controlSuiteRange.Primitives); + Assert.NotNull(controlSuiteRange.Primitives!.SemVer); + Assert.Equal("4.2.0", controlSuiteRange.Primitives.SemVer!.ExactValue); + Assert.True(controlSuiteRange.Primitives.VendorExtensions!.TryGetValue("ics.product", out var controlSuiteProduct) && controlSuiteProduct == "ControlSuite"); + Assert.True(controlSuiteRange.Primitives.VendorExtensions!.TryGetValue("ics.version", out var controlSuiteVersion) && controlSuiteVersion == "4.2"); + Assert.True(controlSuiteRange.Primitives.VendorExtensions!.TryGetValue("ics.vendors", out var controlSuiteVendors) && controlSuiteVendors == "Example Corp"); + + var icsma = Assert.Single(advisories, advisory => advisory.AdvisoryKey == "ICSMA-25-045-01"); + Assert.Contains("CVE-2025-11111", icsma.Aliases); + var icsmaMitigation = Assert.Single(icsma.References.Where(reference => reference.Kind == "mitigation")); + Assert.Contains("Contact HealthTech support", icsmaMitigation.Summary, StringComparison.Ordinal); + Assert.Contains(icsma.References, reference => reference.Url == "https://www.cisa.gov/sites/default/files/2025-10/ICSMA-25-045-01_Supplement.pdf"); + var infusionPackage = Assert.Single(icsma.AffectedPackages, package => string.Equals(package.Identifier, "InfusionManager", StringComparison.OrdinalIgnoreCase)); + var infusionRange = Assert.Single(infusionPackage.VersionRanges); + Assert.Equal("2.1", infusionRange.RangeExpression); + } + + private async Task BuildServiceProviderAsync() + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSourceCommon(); + services.AddIcsCisaConnector(options => + { + options.GovDeliveryCode = "TESTCODE"; + options.TopicsEndpoint = new Uri("https://feed.test/topics.rss", UriKind.Absolute); + options.TopicIds.Clear(); + options.TopicIds.Add("USDHSCISA_TEST"); + options.RequestDelay = TimeSpan.Zero; + options.DetailBaseUri = new Uri("https://www.cisa.gov/", UriKind.Absolute); + 
options.AdditionalHosts.Add("files.cisa.gov"); + }); + + services.Configure(IcsCisaOptions.HttpClientName, builder => + { + builder.HttpMessageHandlerBuilderActions.Add(handlerBuilder => + { + handlerBuilder.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void RegisterResponses() + { + var feedUri = new Uri("https://feed.test/topics.rss?code=TESTCODE&format=xml&topic_id=USDHSCISA_TEST", UriKind.Absolute); + _handler.AddResponse(feedUri, () => CreateTextResponse("IcsCisa/Fixtures/sample-feed.xml", "application/rss+xml")); + + var icsaDetail = new Uri("https://www.cisa.gov/news-events/ics-advisories/icsa-25-123-01", UriKind.Absolute); + _handler.AddResponse(icsaDetail, () => CreateTextResponse("IcsCisa/Fixtures/icsa-25-123-01.html", "text/html")); + + var icsmaDetail = new Uri("https://www.cisa.gov/news-events/ics-medical-advisories/icsma-25-045-01", UriKind.Absolute); + _handler.AddResponse(icsmaDetail, () => CreateTextResponse("IcsCisa/Fixtures/icsma-25-045-01.html", "text/html")); + } + + private static HttpResponseMessage CreateTextResponse(string relativePath, string contentType) + { + var fullPath = Path.Combine(AppContext.BaseDirectory, relativePath); + var content = File.ReadAllText(fullPath); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(content, Encoding.UTF8, contentType), + }; + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + _handler.Clear(); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/StellaOps.Feedser.Source.Ics.Cisa.Tests.csproj b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/StellaOps.Feedser.Source.Ics.Cisa.Tests.csproj new file mode 100644 index 00000000..48c95541 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa.Tests/StellaOps.Feedser.Source.Ics.Cisa.Tests.csproj @@ -0,0 +1,16 @@ + + + net10.0 + enable + enable + + + + + + + + + + + diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Class1.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Class1.cs deleted file mode 100644 index 208f522d..00000000 --- a/src/StellaOps.Feedser.Source.Ics.Cisa/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.Ics.Cisa; - -public sealed class IcsCisaConnectorPlugin : IConnectorPlugin -{ - public string Name => "ics-cisa"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Configuration/IcsCisaOptions.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Configuration/IcsCisaOptions.cs new file mode 100644 index 00000000..eee231d0 --- /dev/null +++ 
b/src/StellaOps.Feedser.Source.Ics.Cisa/Configuration/IcsCisaOptions.cs @@ -0,0 +1,182 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Net; +using System.Net.Http; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Configuration; + +public sealed class IcsCisaOptions +{ + public static string HttpClientName => "source.ics.cisa"; + + /// + /// GovDelivery topics RSS endpoint. Feed URIs are constructed from this base. + /// + public Uri TopicsEndpoint { get; set; } = new("https://content.govdelivery.com/accounts/USDHSCISA/topics.rss", UriKind.Absolute); + + /// + /// GovDelivery personalised subscription code (code=...). + /// + public string GovDeliveryCode { get; set; } = string.Empty; + + /// + /// Topic identifiers to pull (e.g. USDHSCISA_16 for general ICS advisories). + /// + public IList TopicIds { get; } = new List + { + "USDHSCISA_16", // ICS advisories (ICSA) + "USDHSCISA_19", // ICS medical advisories (ICSMA) + "USDHSCISA_17", // ICS alerts + }; + + /// + /// Optional delay between sequential topic fetches to appease GovDelivery throttling. + /// + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan DocumentExpiry { get; set; } = TimeSpan.FromDays(30); + + /// + /// Optional proxy endpoint used when Akamai blocks direct pulls. + /// + public Uri? ProxyUri { get; set; } + + /// + /// HTTP version requested when contacting GovDelivery. + /// + public Version RequestVersion { get; set; } = HttpVersion.Version11; + + /// + /// Negotiation policy applied to HTTP requests. + /// + public HttpVersionPolicy RequestVersionPolicy { get; set; } = HttpVersionPolicy.RequestVersionOrLower; + + /// + /// Maximum number of retry attempts for RSS fetches. + /// + public int MaxAttempts { get; set; } = 4; + + /// + /// Base delay used for exponential backoff between attempts. + /// + public TimeSpan BaseDelay { get; set; } = TimeSpan.FromSeconds(3); + + /// + /// Base URI used when fetching HTML detail pages. + /// + public Uri DetailBaseUri { get; set; } = new("https://www.cisa.gov/", UriKind.Absolute); + + /// + /// Optional timeout override applied to detail page fetches. + /// + public TimeSpan DetailRequestTimeout { get; set; } = TimeSpan.FromSeconds(25); + + /// + /// Additional hosts allowed by the connector (detail pages, attachments). 
+ /// + public IList AdditionalHosts { get; } = new List + { + "www.cisa.gov", + "cisa.gov" + }; + + public bool EnableDetailScrape { get; set; } = true; + + public bool CaptureAttachments { get; set; } = true; + + [MemberNotNull(nameof(TopicsEndpoint), nameof(GovDeliveryCode))] + public void Validate() + { + if (TopicsEndpoint is null || !TopicsEndpoint.IsAbsoluteUri) + { + throw new InvalidOperationException("TopicsEndpoint must be an absolute URI."); + } + + if (string.IsNullOrWhiteSpace(GovDeliveryCode)) + { + throw new InvalidOperationException("GovDeliveryCode must be provided."); + } + + if (TopicIds.Count == 0) + { + throw new InvalidOperationException("At least one GovDelivery topic identifier is required."); + } + + foreach (var topic in TopicIds) + { + if (string.IsNullOrWhiteSpace(topic)) + { + throw new InvalidOperationException("Topic identifiers cannot be blank."); + } + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("RequestDelay cannot be negative."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException("FailureBackoff must be greater than zero."); + } + + if (DocumentExpiry <= TimeSpan.Zero) + { + throw new InvalidOperationException("DocumentExpiry must be greater than zero."); + } + + if (MaxAttempts <= 0) + { + throw new InvalidOperationException("MaxAttempts must be positive."); + } + + if (BaseDelay <= TimeSpan.Zero) + { + throw new InvalidOperationException("BaseDelay must be greater than zero."); + } + + if (DetailBaseUri is null || !DetailBaseUri.IsAbsoluteUri) + { + throw new InvalidOperationException("DetailBaseUri must be an absolute URI."); + } + + if (DetailRequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("DetailRequestTimeout must be greater than zero."); + } + + if (ProxyUri is not null && !ProxyUri.IsAbsoluteUri) + { + throw new InvalidOperationException("ProxyUri must be an absolute URI when specified."); + } + + foreach (var host in AdditionalHosts) + { + if (string.IsNullOrWhiteSpace(host)) + { + throw new InvalidOperationException("Additional host entries cannot be blank."); + } + } + } + + public Uri BuildTopicUri(string topicId) + { + ArgumentException.ThrowIfNullOrEmpty(topicId); + Validate(); + + var builder = new UriBuilder(TopicsEndpoint); + var query = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["code"] = GovDeliveryCode, + ["format"] = "xml", + ["topic_id"] = topicId.Trim(), + }; + + builder.Query = string.Join("&", query.Select(pair => $"{Uri.EscapeDataString(pair.Key)}={Uri.EscapeDataString(pair.Value)}")); + return builder.Uri; + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/HANDOVER.md b/src/StellaOps.Feedser.Source.Ics.Cisa/HANDOVER.md new file mode 100644 index 00000000..6f5ed806 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/HANDOVER.md @@ -0,0 +1,21 @@ +# ICS CISA Connector – Status (2025-10-16) + +## Context +- Proxy plumbing for GovDelivery (`SourceHttpClientOptions.Proxy*`) is implemented and covered by `SourceHttpClientBuilderTests.AddSourceHttpClient_LoadsProxyConfiguration`. +- Detail enrichment now extracts mitigation paragraphs/bullets, merges them with feed data, and emits `mitigation` references plus combined alias sets. +- `BuildAffectedPackages` parses product/version pairs and now persists SemVer exact values for canonical ranges via the advisory store. 
+ +## Current Outcomes +- Feed parser fixtures were refreshed so vendor PDFs stay surfaced as attachments; DTO references continue including canonical links. +- SemVer primitive deserialisation now restores `exactValue` (e.g., `"4.2"` → `"4.2.0"`), keeping connector snapshots deterministic. +- Console debugging noise was removed from connector/parser code. +- Ops runbook documents attachment + SemVer validation steps for dry runs. +- `dotnet test src/StellaOps.Feedser.Source.Ics.Cisa.Tests/StellaOps.Feedser.Source.Ics.Cisa.Tests.csproj` passes (2025-10-16). + +## Outstanding Items +- None. Continue monitoring Akamai access decisions and proxy requirements via Ops feedback. + +## Verification Checklist +- ✅ `dotnet test src/StellaOps.Feedser.Source.Ics.Cisa.Tests/StellaOps.Feedser.Source.Ics.Cisa.Tests.csproj` +- ☐ `dotnet test src/StellaOps.Feedser.Source.Common.Tests/StellaOps.Feedser.Source.Common.Tests.csproj` (proxy support) — rerun when Source.Common changes land. +- Keep this summary aligned with `TASKS.md` as further work emerges. diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnector.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnector.cs new file mode 100644 index 00000000..e00825cd --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnector.cs @@ -0,0 +1,1248 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.RegularExpressions; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using AngleSharp.Html.Dom; +using AngleSharp.Html.Parser; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using MongoDB.Bson.IO; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Common.Html; +using StellaOps.Feedser.Source.Ics.Cisa.Configuration; +using StellaOps.Feedser.Source.Ics.Cisa.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Ics.Cisa; + +public sealed class IcsCisaConnector : IFeedConnector +{ + private const string SchemaVersion = "ics.cisa.feed.v1"; + + private static readonly string[] RssAcceptHeaders = { "application/rss+xml", "application/xml", "text/xml" }; + private static readonly string[] RssFallbackAcceptHeaders = { "application/rss+xml", "application/xml", "text/xml", "*/*" }; + private static readonly string[] DetailAcceptHeaders = { "text/html", "application/xhtml+xml", "*/*" }; + + private readonly SourceFetchService _fetchService; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly IcsCisaOptions _options; + private readonly IcsCisaFeedParser _parser; + private readonly IcsCisaDiagnostics _diagnostics; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly HtmlContentSanitizer _htmlSanitizer = new(); + private readonly HtmlParser _htmlParser = new(); + + public IcsCisaConnector( + SourceFetchService 
fetchService, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + IOptions options, + IcsCisaFeedParser parser, + IcsCisaDiagnostics diagnostics, + TimeProvider? timeProvider, + ILogger logger) + { + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _parser = parser ?? throw new ArgumentNullException(nameof(parser)); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => IcsCisaConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var now = _timeProvider.GetUtcNow(); + var touched = false; + + foreach (var topic in _options.TopicIds) + { + cancellationToken.ThrowIfCancellationRequested(); + + _diagnostics.FetchAttempt(topic); + var topicUri = _options.BuildTopicUri(topic); + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, topicUri.ToString(), cancellationToken).ConfigureAwait(false); + + var request = new SourceFetchRequest(IcsCisaOptions.HttpClientName, SourceName, topicUri) + { + AcceptHeaders = RssAcceptHeaders, + Metadata = new Dictionary(StringComparer.Ordinal) + { + ["icscisa.topicId"] = topic, + }, + }; + + if (existing is not null) + { + request = request with + { + ETag = existing.Etag, + LastModified = existing.LastModified, + }; + } + + SourceFetchResult? 
result = null; + var documentsAdded = 0; + var usedFallback = false; + + try + { + result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (HttpRequestException ex) when (ShouldRetryWithFallback(ex)) + { + _logger.LogWarning(ex, "Retrying CISA ICS topic {TopicId} via Akamai fallback", topic); + _diagnostics.FetchFallback(topic); + usedFallback = true; + var fallbackRequest = request with + { + AcceptHeaders = RssFallbackAcceptHeaders, + Metadata = AppendMetadata(request.Metadata, "icscisa.retry", "akamai"), + }; + + try + { + result = await _fetchService.FetchAsync(fallbackRequest, cancellationToken).ConfigureAwait(false); + } + catch (Exception fallbackEx) when (fallbackEx is HttpRequestException or TaskCanceledException) + { + _diagnostics.FetchFailure(topic); + _logger.LogError(fallbackEx, "Fallback fetch failed for CISA ICS topic {TopicId}", topic); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, fallbackEx.Message, cancellationToken).ConfigureAwait(false); + throw; + } + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + _diagnostics.FetchFailure(topic); + _logger.LogError(ex, "Failed to fetch CISA ICS topic {TopicId}", topic); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + if (result is null) + { + _diagnostics.FetchFailure(topic); + continue; + } + + if (result.IsNotModified) + { + _diagnostics.FetchNotModified(topic); + _logger.LogDebug("CISA ICS topic {TopicId} not modified", topic); + } + else if (result.IsSuccess && result.Document is not null) + { + pendingDocuments.Add(result.Document.Id); + pendingMappings.Remove(result.Document.Id); + touched = true; + documentsAdded++; + _diagnostics.FetchSuccess(topic, 1); + _logger.LogInformation("Fetched CISA ICS topic {TopicId} document {DocumentId}", topic, result.Document.Id); + } + else if (result.IsSuccess) + { + _diagnostics.FetchSuccess(topic, 0); + _logger.LogDebug("CISA ICS topic {TopicId} fetch succeeded without new document (fallback={Fallback})", topic, usedFallback); + } + else + { + _diagnostics.FetchFailure(topic); + _logger.LogWarning("CISA ICS topic {TopicId} returned status {StatusCode}", topic, result.StatusCode); + } + + if (documentsAdded > 0) + { + _logger.LogInformation("CISA ICS topic {TopicId} added {DocumentsAdded} document(s) (fallbackUsed={Fallback})", topic, documentsAdded, usedFallback); + } + + if (_options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + + if (!touched) + { + await UpdateCursorAsync(cursor.WithPendingDocuments(pendingDocuments).WithPendingMappings(pendingMappings), cancellationToken).ConfigureAwait(false); + return; + } + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var remainingDocuments = cursor.PendingDocuments.ToList(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + DateTimeOffset? 
latestPublished = cursor.LastPublished; + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var topicId = "unknown"; + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(topicId); + continue; + } + + if (document.Metadata is not null && document.Metadata.TryGetValue("icscisa.topicId", out var topicValue)) + { + topicId = topicValue; + } + + if (!document.GridFsId.HasValue) + { + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(topicId); + continue; + } + + byte[] bytes; + try + { + bytes = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to download CISA ICS payload {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(topicId); + continue; + } + + IReadOnlyCollection advisories; + try + { + using var stream = new MemoryStream(bytes, writable: false); + var topicUri = Uri.TryCreate(document.Uri, UriKind.Absolute, out var parsed) ? parsed : null; + advisories = _parser.Parse(stream, string.Equals(topicId, "USDHSCISA_19", StringComparison.OrdinalIgnoreCase), parsed); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to parse CISA ICS feed {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(topicId); + continue; + } + + var advisoryList = advisories.ToList(); + var detailAttempts = 0; + if (_options.EnableDetailScrape) + { + var enriched = new List(advisoryList.Count); + foreach (var advisory in advisoryList) + { + if (NeedsDetailFetch(advisory)) + { + detailAttempts++; + } + var enrichedAdvisory = await EnrichAdvisoryAsync(advisory, cancellationToken).ConfigureAwait(false); + enriched.Add(enrichedAdvisory); + } + + advisoryList = enriched; + } + + var attachmentTotal = advisoryList.Sum(static advisory => advisory.Attachments is null ? 0 : advisory.Attachments.Count); + + var feedDto = new IcsCisaFeedDto + { + TopicId = topicId, + FeedUri = document.Uri, + Advisories = advisoryList, + }; + + try + { + var json = JsonSerializer.Serialize(feedDto, new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false, + }); + var bson = BsonDocument.Parse(json); + var dtoRecord = new DtoRecord( + Guid.NewGuid(), + document.Id, + SourceName, + SchemaVersion, + bson, + _timeProvider.GetUtcNow()); + + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + + remainingDocuments.Remove(documentId); + pendingMappings.Add(document.Id); + + var docPublished = advisoryList.Count > 0 ? 
advisoryList.Max(a => a.Published) : (DateTimeOffset?)null; + if (docPublished.HasValue && docPublished > latestPublished) + { + latestPublished = docPublished; + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to persist CISA ICS DTO {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(topicId); + continue; + } + + _diagnostics.ParseSuccess(topicId, advisoryList.Count, attachmentTotal, detailAttempts); + _logger.LogInformation( + "CISA ICS parse produced advisories={Advisories} attachments={Attachments} detailAttempts={DetailAttempts} topic={TopicId}", + advisoryList.Count, + attachmentTotal, + detailAttempts, + topicId); + } + + var updatedCursor = cursor + .WithPendingDocuments(remainingDocuments) + .WithPendingMappings(pendingMappings) + .WithLastPublished(latestPublished); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToHashSet(); + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + pendingMappings.Remove(documentId); + _diagnostics.MapFailure("unknown"); + continue; + } + + IcsCisaFeedDto? 
feedDto; + try + { + var json = dtoRecord.Payload.ToJson(new JsonWriterSettings { OutputMode = JsonOutputMode.RelaxedExtendedJson }); + feedDto = JsonSerializer.Deserialize(json, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to deserialize CISA ICS DTO {DtoId}", dtoRecord.Id); + pendingMappings.Remove(documentId); + await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + _diagnostics.MapFailure("unknown"); + continue; + } + + if (feedDto is null) + { + pendingMappings.Remove(documentId); + await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + _diagnostics.MapFailure("unknown"); + continue; + } + + var allMapped = true; + var mappedCount = 0; + foreach (var advisoryDto in feedDto.Advisories) + { + try + { + var advisory = MapAdvisory(dtoRecord, feedDto, advisoryDto); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + _diagnostics.MapSuccess( + advisoryDto.AdvisoryId, + advisory.References.Length, + advisory.AffectedPackages.Length, + advisory.Aliases.Length); + mappedCount++; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to map CISA ICS advisory {AdvisoryId}", advisoryDto.AdvisoryId); + _diagnostics.MapFailure(advisoryDto.AdvisoryId); + allMapped = false; + } + } + + pendingMappings.Remove(documentId); + + if (!allMapped) + { + _logger.LogWarning( + "CISA ICS mapping failed for document {DocumentId} (mapped={MappedCount} of {Total})", + documentId, + mappedCount, + feedDto.Advisories.Count); + await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + continue; + } + + await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("CISA ICS mapped {MappedCount} advisories from document {DocumentId}", mappedCount, documentId); + } + + await UpdateCursorAsync(cursor.WithPendingMappings(pendingMappings), cancellationToken).ConfigureAwait(false); + } + + private Advisory MapAdvisory(DtoRecord dtoRecord, IcsCisaFeedDto feedDto, IcsCisaAdvisoryDto advisoryDto) + { + var recordedAt = dtoRecord.ValidatedAt; + var fetchProvenance = new AdvisoryProvenance(SourceName, "feed", feedDto.FeedUri, recordedAt); + var mappingProvenance = new AdvisoryProvenance(SourceName, "mapping", advisoryDto.AdvisoryId, _timeProvider.GetUtcNow()); + + var aliases = CombineAliases(advisoryDto); + var references = BuildReferences(advisoryDto, recordedAt).ToList(); + var mitigationReferences = BuildMitigationReferences(advisoryDto, recordedAt); + if (mitigationReferences.Count > 0) + { + references.AddRange(mitigationReferences); + } + + var affectedPackages = BuildAffectedPackages(advisoryDto, recordedAt); + + return new Advisory( + advisoryDto.AdvisoryId, + advisoryDto.Title, + advisoryDto.Summary, + language: "en", + published: advisoryDto.Published, + modified: advisoryDto.Updated ?? 
advisoryDto.Published, + severity: null, + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: affectedPackages, + cvssMetrics: Array.Empty(), + provenance: new[] { fetchProvenance, mappingProvenance }); + } + + internal static IReadOnlyCollection CombineAliases(IcsCisaAdvisoryDto advisoryDto) + { + var set = new HashSet(StringComparer.OrdinalIgnoreCase); + + if (advisoryDto.Aliases is not null) + { + foreach (var alias in advisoryDto.Aliases) + { + if (string.IsNullOrWhiteSpace(alias)) + { + continue; + } + + set.Add(alias.Trim()); + } + } + + if (advisoryDto.CveIds is not null) + { + foreach (var cve in advisoryDto.CveIds) + { + if (string.IsNullOrWhiteSpace(cve)) + { + continue; + } + + set.Add(cve.Trim()); + } + } + + return set.Count == 0 + ? Array.Empty() + : set.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); + } + + internal static IReadOnlyCollection BuildMitigationReferences(IcsCisaAdvisoryDto advisoryDto, DateTimeOffset recordedAt) + { + if (advisoryDto.Mitigations is null || advisoryDto.Mitigations.Count == 0) + { + return Array.Empty(); + } + + var references = new List(); + var baseUrl = Validation.LooksLikeHttpUrl(advisoryDto.Link) ? advisoryDto.Link : null; + var sourceTag = advisoryDto.IsMedical ? "icscisa-medical-mitigation" : "icscisa-mitigation"; + + var index = 0; + foreach (var mitigation in advisoryDto.Mitigations) + { + index++; + if (string.IsNullOrWhiteSpace(mitigation)) + { + continue; + } + + var summary = mitigation.Trim(); + var url = baseUrl is not null + ? $"{baseUrl}#mitigation-{index}" + : $"icscisa:mitigation:{advisoryDto.AdvisoryId}:{index}"; + + references.Add(new AdvisoryReference( + url, + kind: "mitigation", + sourceTag: sourceTag, + summary: summary, + provenance: new AdvisoryProvenance("ics-cisa", "mitigation", url, recordedAt))); + } + + return references.Count == 0 ? Array.Empty() : references; + } + + internal static IReadOnlyCollection BuildReferences(IcsCisaAdvisoryDto advisoryDto, DateTimeOffset recordedAt) + { + var references = new List(); + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + + if (advisoryDto.Attachments is { Count: > 0 }) + { + foreach (var attachment in advisoryDto.Attachments) + { + if (attachment is null || !Validation.LooksLikeHttpUrl(attachment.Url)) + { + continue; + } + + var url = attachment.Url; + if (!seen.Add(url)) + { + continue; + } + + try + { + references.Add(new AdvisoryReference( + url, + kind: "attachment", + sourceTag: advisoryDto.IsMedical ? "icscisa-medical-attachment" : "icscisa-attachment", + summary: attachment.Title, + provenance: new AdvisoryProvenance("ics-cisa", "attachment", url, recordedAt))); + } + catch (ArgumentException) + { + // ignore invalid URIs + } + } + } + + foreach (var reference in advisoryDto.References ?? Array.Empty()) + { + if (!Validation.LooksLikeHttpUrl(reference)) + { + continue; + } + + if (!seen.Add(reference)) + { + continue; + } + + try + { + references.Add(new AdvisoryReference( + reference, + kind: "advisory", + sourceTag: advisoryDto.IsMedical ? "icscisa-medical" : "icscisa", + summary: null, + provenance: new AdvisoryProvenance("ics-cisa", "reference", reference, recordedAt))); + } + catch (ArgumentException) + { + // ignore invalid URIs + } + } + + if (references.Count == 0 && Validation.LooksLikeHttpUrl(advisoryDto.Link) && seen.Add(advisoryDto.Link)) + { + references.Add(new AdvisoryReference( + advisoryDto.Link, + kind: "advisory", + sourceTag: advisoryDto.IsMedical ? 
"icscisa-medical" : "icscisa", + summary: null, + provenance: new AdvisoryProvenance("ics-cisa", "reference", advisoryDto.Link, recordedAt))); + } + + return references; + } + + internal static IReadOnlyCollection BuildAffectedPackages(IcsCisaAdvisoryDto advisoryDto, DateTimeOffset recordedAt) + { + var packages = new List(); + var vendors = advisoryDto.Vendors ?? Array.Empty(); + var normalizedVendors = vendors + .Where(static vendor => !string.IsNullOrWhiteSpace(vendor)) + .Select(static vendor => vendor.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + var parsedProducts = (advisoryDto.Products ?? Array.Empty()) + .Where(static product => !string.IsNullOrWhiteSpace(product)) + .Select(ParseProductInfo) + .Where(static product => !string.IsNullOrWhiteSpace(product.Name)) + .ToArray(); + + if (parsedProducts.Length > 0) + { + foreach (var product in parsedProducts) + { + } + + foreach (var product in parsedProducts) + { + var provenance = new AdvisoryProvenance("ics-cisa", "affected", product.Name!, recordedAt); + var vendorExtensions = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["ics.product"] = product.Name! + }; + + if (!string.IsNullOrWhiteSpace(product.VersionExpression)) + { + vendorExtensions["ics.version"] = product.VersionExpression!; + } + + if (normalizedVendors.Length > 0) + { + vendorExtensions["ics.vendors"] = string.Join(",", normalizedVendors); + } + + var semVer = TryCreateSemVerPrimitive(product.VersionExpression); + var range = new AffectedVersionRange( + rangeKind: "product", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: product.VersionExpression, + provenance: provenance, + primitives: new RangePrimitives(semVer, null, null, vendorExtensions)); + + packages.Add(new AffectedPackage( + AffectedPackageTypes.IcsVendor, + product.Name!, + platform: null, + versionRanges: new[] { range }, + statuses: Array.Empty(), + provenance: new[] { provenance })); + } + + return packages; + } + + if (normalizedVendors.Length == 0) + { + return packages; + } + + foreach (var vendor in normalizedVendors) + { + var provenance = new AdvisoryProvenance("ics-cisa", "affected", vendor, recordedAt); + var vendorExtensions = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["ics.vendor"] = vendor + }; + + var range = new AffectedVersionRange( + rangeKind: "vendor", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: null, + provenance: provenance, + primitives: new RangePrimitives(null, null, null, vendorExtensions)); + + packages.Add(new AffectedPackage( + AffectedPackageTypes.IcsVendor, + vendor, + platform: null, + versionRanges: new[] { range }, + statuses: Array.Empty(), + provenance: new[] { provenance })); + } + + return packages; + } + + + private static ProductInfo ParseProductInfo(string raw) + { + var trimmed = raw?.Trim(); + if (string.IsNullOrWhiteSpace(trimmed)) + { + return new ProductInfo(null, null); + } + + if (trimmed.Contains(':', StringComparison.Ordinal)) + { + var parts = trimmed.Split(':', 2); + var name = parts[0].Trim(); + var versionSegment = parts[1].Trim(); + return new ProductInfo( + string.IsNullOrWhiteSpace(name) ? trimmed : name, + string.IsNullOrWhiteSpace(versionSegment) ? 
null : versionSegment); + } + + var lastSpace = trimmed.LastIndexOf(' '); + if (lastSpace > 0) + { + var candidateVersion = trimmed[(lastSpace + 1)..].Trim(); + if (Regex.IsMatch(candidateVersion, "^[vV]?[0-9].*")) + { + var name = trimmed[..lastSpace].Trim(); + return new ProductInfo( + string.IsNullOrWhiteSpace(name) ? trimmed : name, + candidateVersion); + } + } + + return new ProductInfo(trimmed, null); + } + + private static SemVerPrimitive? TryCreateSemVerPrimitive(string? versionExpression) + { + if (string.IsNullOrWhiteSpace(versionExpression)) + { + return null; + } + + var normalized = NormalizeSemVer(versionExpression); + if (normalized is null) + { + var trimmed = versionExpression.Trim(); + if (trimmed.StartsWith("v", StringComparison.OrdinalIgnoreCase)) + { + trimmed = trimmed[1..]; + } + + if (Version.TryParse(trimmed, out var parsed)) + { + normalized = string.Join('.', new[] + { + parsed.Major.ToString(CultureInfo.InvariantCulture), + parsed.Minor >= 0 ? parsed.Minor.ToString(CultureInfo.InvariantCulture) : "0", + parsed.Build >= 0 ? parsed.Build.ToString(CultureInfo.InvariantCulture) : "0", + }); + } + } + + if (normalized is null) + { + return null; + } + + return new SemVerPrimitive( + null, + true, + null, + true, + null, + true, + null, + normalized); + } + + private static string? NormalizeSemVer(string rawVersion) + { + var trimmed = rawVersion.Trim(); + if (trimmed.StartsWith("v", StringComparison.OrdinalIgnoreCase)) + { + trimmed = trimmed[1..]; + } + + if (!Regex.IsMatch(trimmed, @"^[0-9]+(\.[0-9]+){0,2}$")) + { + return null; + } + + var parts = trimmed.Split('.', StringSplitOptions.RemoveEmptyEntries); + var components = parts.Take(3).ToList(); + while (components.Count < 3) + { + components.Add("0"); + } + + return string.Join('.', components); + } + + private sealed record ProductInfo(string? Name, string? VersionExpression); + + private async Task EnrichAdvisoryAsync(IcsCisaAdvisoryDto advisory, CancellationToken cancellationToken) + { + if (!NeedsDetailFetch(advisory)) + { + return advisory; + } + + if (!Uri.TryCreate(advisory.Link, UriKind.Absolute, out var detailUri)) + { + return advisory; + } + + var request = new SourceFetchRequest(IcsCisaOptions.HttpClientName, SourceName, detailUri) + { + AcceptHeaders = DetailAcceptHeaders, + Metadata = AppendMetadata(null, "icscisa.detail", advisory.AdvisoryId), + TimeoutOverride = _options.DetailRequestTimeout, + }; + + try + { + var result = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess || result.Content is null) + { + _diagnostics.DetailFetchFailure(advisory.AdvisoryId); + return advisory; + } + + var html = Encoding.UTF8.GetString(result.Content); + var sanitized = _htmlSanitizer.Sanitize(html, detailUri); + if (string.IsNullOrWhiteSpace(sanitized)) + { + _diagnostics.DetailFetchSuccess(advisory.AdvisoryId); + return advisory with { DetailHtml = sanitized }; + } + + var detailAttachments = _options.CaptureAttachments + ? ParseAttachmentsFromHtml(sanitized, detailUri) + : Array.Empty(); + var mergedAttachments = _options.CaptureAttachments + ? 
MergeAttachments(advisory.Attachments, detailAttachments) + : advisory.Attachments; + + var detailMitigations = ParseMitigationsFromHtml(sanitized); + var mergedMitigations = MergeMitigations(advisory.Mitigations, detailMitigations); + + var detailReferences = ParseReferencesFromHtml(sanitized, detailUri); + var mergedReferences = MergeReferences(advisory.References, detailReferences); + + var summary = string.IsNullOrWhiteSpace(advisory.Summary) + ? ExtractFirstSentence(sanitized) + : advisory.Summary; + + var descriptionHtml = string.IsNullOrWhiteSpace(advisory.DescriptionHtml) + ? sanitized + : advisory.DescriptionHtml; + + return advisory with + { + DetailHtml = sanitized, + DescriptionHtml = descriptionHtml, + Summary = summary, + References = mergedReferences, + Attachments = mergedAttachments, + Mitigations = mergedMitigations, + }; + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + _logger.LogWarning(ex, "Failed to fetch detail page for {AdvisoryId}", advisory.AdvisoryId); + _diagnostics.DetailFetchFailure(advisory.AdvisoryId); + return advisory; + } + } + + private bool NeedsDetailFetch(IcsCisaAdvisoryDto advisory) + { + if (!_options.EnableDetailScrape) + { + return false; + } + + if (string.IsNullOrWhiteSpace(advisory.DescriptionHtml)) + { + return true; + } + + if (string.IsNullOrWhiteSpace(advisory.Summary)) + { + return true; + } + + if (advisory.Mitigations is null || advisory.Mitigations.Count == 0) + { + return true; + } + + if (_options.CaptureAttachments && (advisory.Attachments is null || advisory.Attachments.Count == 0)) + { + return true; + } + + return false; + } + + private IReadOnlyCollection ParseMitigationsFromHtml(string sanitizedHtml) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return Array.Empty(); + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var mitigations = new List(); + + foreach (var heading in document.QuerySelectorAll("h1, h2, h3, h4, h5, h6")) + { + var headingText = heading.TextContent?.Trim(); + if (!IsMitigationHeading(headingText)) + { + continue; + } + + var node = heading.NextElementSibling; + while (node is not null && node is not IHtmlHeadingElement) + { + if (node is IHtmlParagraphElement or IHtmlDivElement) + { + var content = Validation.CollapseWhitespace(node.TextContent); + if (!string.IsNullOrWhiteSpace(content)) + { + mitigations.Add(content); + } + } + else if (node is IHtmlElement element && (string.Equals(element.TagName, "UL", StringComparison.OrdinalIgnoreCase) || string.Equals(element.TagName, "OL", StringComparison.OrdinalIgnoreCase))) + { + foreach (var item in element.Children) + { + var content = Validation.CollapseWhitespace(item.TextContent); + if (!string.IsNullOrWhiteSpace(content)) + { + mitigations.Add(content); + } + } + } + + node = node.NextElementSibling; + } + } + + return mitigations.Count == 0 ? Array.Empty() : mitigations; + } + catch + { + return Array.Empty(); + } + } + + private static bool IsMitigationHeading(string? 
headingText) + { + if (string.IsNullOrWhiteSpace(headingText)) + { + return false; + } + + return headingText.Contains("mitigation", StringComparison.OrdinalIgnoreCase); + } + + private IReadOnlyCollection ParseAttachmentsFromHtml(string sanitizedHtml, Uri baseUri) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return Array.Empty(); + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var attachments = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var anchor in document.QuerySelectorAll("a")) + { + var href = anchor.GetAttribute("href"); + if (string.IsNullOrWhiteSpace(href)) + { + continue; + } + + if (!Uri.TryCreate(baseUri, href, out var resolved)) + { + continue; + } + + var url = resolved.ToString(); + if (!url.EndsWith(".pdf", StringComparison.OrdinalIgnoreCase) && + !url.Contains("/pdf", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + attachments[url] = new IcsCisaAttachmentDto + { + Title = anchor.TextContent?.Trim(), + Url = url, + }; + } + + return attachments.Count == 0 + ? Array.Empty() + : attachments.Values.ToArray(); + } + catch + { + return Array.Empty(); + } + } + + private IReadOnlyCollection ParseReferencesFromHtml(string sanitizedHtml, Uri baseUri) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return Array.Empty(); + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var links = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var anchor in document.QuerySelectorAll("a")) + { + var href = anchor.GetAttribute("href"); + if (string.IsNullOrWhiteSpace(href)) + { + continue; + } + + if (Uri.TryCreate(baseUri, href, out var resolved) && Validation.LooksLikeHttpUrl(resolved.ToString())) + { + links.Add(resolved.ToString()); + } + } + + return links.Count == 0 ? Array.Empty() : links.ToArray(); + } + catch + { + return Array.Empty(); + } + } + + internal static IReadOnlyCollection MergeMitigations(IReadOnlyCollection? existing, IReadOnlyCollection incoming) + { + if ((existing is null || existing.Count == 0) && (incoming is null || incoming.Count == 0)) + { + return Array.Empty(); + } + + var set = new HashSet(StringComparer.OrdinalIgnoreCase); + var merged = new List(); + + if (existing is not null) + { + foreach (var mitigation in existing) + { + var value = mitigation?.Trim(); + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + if (set.Add(value)) + { + merged.Add(value); + } + } + } + + if (incoming is not null) + { + foreach (var mitigation in incoming) + { + var value = mitigation?.Trim(); + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + if (set.Add(value)) + { + merged.Add(value); + } + } + } + + return merged.Count == 0 ? Array.Empty() : merged; + } + + internal static IReadOnlyCollection MergeAttachments(IReadOnlyCollection? 
existing, IReadOnlyCollection incoming) + { + if ((existing is null || existing.Count == 0) && (incoming is null || incoming.Count == 0)) + { + return Array.Empty(); + } + + var map = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (existing is not null) + { + foreach (var attachment in existing) + { + if (attachment is null || string.IsNullOrWhiteSpace(attachment.Url)) + { + continue; + } + + map[attachment.Url] = attachment; + } + } + + if (incoming is not null) + { + foreach (var attachment in incoming) + { + if (attachment is null || string.IsNullOrWhiteSpace(attachment.Url)) + { + continue; + } + + if (!map.ContainsKey(attachment.Url) || string.IsNullOrWhiteSpace(map[attachment.Url].Title)) + { + map[attachment.Url] = attachment; + } + } + } + + return map.Count == 0 ? Array.Empty() : map.Values.ToArray(); + } + + internal static IReadOnlyCollection MergeReferences(IReadOnlyCollection? existing, IReadOnlyCollection incoming) + { + var links = new HashSet(existing ?? Array.Empty(), StringComparer.OrdinalIgnoreCase); + foreach (var link in incoming) + { + if (Validation.LooksLikeHttpUrl(link)) + { + links.Add(link); + } + } + + return links.Count == 0 ? Array.Empty() : links.ToArray(); + } + + internal static string? ExtractFirstSentence(string sanitizedHtml) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return null; + } + + var text = Validation.CollapseWhitespace(sanitizedHtml); + if (text.Length <= 280) + { + return text; + } + + var terminator = text.IndexOf('.', StringComparison.Ordinal); + if (terminator <= 0 || terminator > 280) + { + return text[..Math.Min(280, text.Length)].Trim(); + } + + return text[..(terminator + 1)].Trim(); + } + + internal static IReadOnlyDictionary AppendMetadata(IReadOnlyDictionary? metadata, string key, string value) + { + var dictionary = new Dictionary(StringComparer.Ordinal); + + if (metadata is not null) + { + foreach (var pair in metadata) + { + dictionary[pair.Key] = pair.Value; + } + } + + dictionary[key] = value; + return dictionary; + } + + internal static bool ShouldRetryWithFallback(HttpRequestException exception) + { + var message = exception.Message ?? string.Empty; + return message.Contains(" 403", StringComparison.OrdinalIgnoreCase) + || message.Contains("403", StringComparison.OrdinalIgnoreCase) + || message.Contains("406", StringComparison.OrdinalIgnoreCase); + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? 
IcsCisaCursor.Empty : IcsCisaCursor.FromBson(state.Cursor); + } + + private Task UpdateCursorAsync(IcsCisaCursor cursor, CancellationToken cancellationToken) + => _stateRepository.UpdateCursorAsync(SourceName, cursor.ToBsonDocument(), _timeProvider.GetUtcNow(), cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnectorPlugin.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnectorPlugin.cs new file mode 100644 index 00000000..bf13cb9d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaConnectorPlugin.cs @@ -0,0 +1,19 @@ +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Ics.Cisa; + +public sealed class IcsCisaConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "ics-cisa"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return ActivatorUtilities.CreateInstance(services); + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs new file mode 100644 index 00000000..3b84d7b7 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs @@ -0,0 +1,56 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; +using StellaOps.Feedser.Source.Ics.Cisa.Configuration; + +namespace StellaOps.Feedser.Source.Ics.Cisa; + +public sealed class IcsCisaDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:ics-cisa"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddIcsCisaConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient(); + services.AddTransient(); + services.AddTransient(); + + services.PostConfigure(options => + { + EnsureJob(options, IcsCisaJobKinds.Fetch, typeof(IcsCisaFetchJob)); + EnsureJob(options, IcsCisaJobKinds.Parse, typeof(IcsCisaParseJob)); + EnsureJob(options, IcsCisaJobKinds.Map, typeof(IcsCisaMapJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + ArgumentNullException.ThrowIfNull(options); + + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs new file mode 100644 index 00000000..c7b87a93 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs @@ -0,0 +1,60 @@ +using System; +using System.Net; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Ics.Cisa.Configuration; +using 
StellaOps.Feedser.Source.Ics.Cisa.Internal; + +namespace StellaOps.Feedser.Source.Ics.Cisa; + +public static class IcsCisaServiceCollectionExtensions +{ + public static IServiceCollection AddIcsCisaConnector(this IServiceCollection services, Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + services.AddOptions() + .Configure(configure) + .PostConfigure(static opts => opts.Validate()); + + services.AddSourceHttpClient(IcsCisaOptions.HttpClientName, (sp, clientOptions) => + { + var options = sp.GetRequiredService>().Value; + clientOptions.BaseAddress = new Uri(options.TopicsEndpoint.GetLeftPart(UriPartial.Authority)); + clientOptions.Timeout = TimeSpan.FromSeconds(45); + clientOptions.UserAgent = "StellaOps.Feedser.IcsCisa/1.0"; + clientOptions.AllowedHosts.Clear(); + clientOptions.AllowedHosts.Add(options.TopicsEndpoint.Host); + clientOptions.AllowedHosts.Add(options.DetailBaseUri.Host); + foreach (var host in options.AdditionalHosts) + { + clientOptions.AllowedHosts.Add(host); + } + clientOptions.DefaultRequestHeaders["Accept"] = "application/rss+xml"; + clientOptions.RequestVersion = options.RequestVersion; + clientOptions.VersionPolicy = options.RequestVersionPolicy; + clientOptions.MaxAttempts = options.MaxAttempts; + clientOptions.BaseDelay = options.BaseDelay; + clientOptions.EnableMultipleHttp2Connections = false; + + clientOptions.ConfigureHandler = handler => + { + handler.AutomaticDecompression = DecompressionMethods.All; + }; + + if (options.ProxyUri is not null) + { + clientOptions.ProxyAddress = options.ProxyUri; + clientOptions.ProxyBypassOnLocal = false; + } + }); + + services.AddSingleton(); + services.AddSingleton(); + services.AddTransient(); + + return services; + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs new file mode 100644 index 00000000..fd3d86c5 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +public sealed record IcsCisaAdvisoryDto +{ + [JsonPropertyName("advisoryId")] + public required string AdvisoryId { get; init; } + + [JsonPropertyName("title")] + public required string Title { get; init; } + + [JsonPropertyName("link")] + public required string Link { get; init; } + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("descriptionHtml")] + public string DescriptionHtml { get; init; } = string.Empty; + + [JsonPropertyName("published")] + public DateTimeOffset Published { get; init; } + + [JsonPropertyName("updated")] + public DateTimeOffset? 
Updated { get; init; } + + [JsonPropertyName("medical")] + public bool IsMedical { get; init; } + + [JsonPropertyName("aliases")] + public IReadOnlyCollection Aliases { get; init; } = Array.Empty(); + + [JsonPropertyName("cveIds")] + public IReadOnlyCollection CveIds { get; init; } = Array.Empty(); + + [JsonPropertyName("vendors")] + public IReadOnlyCollection Vendors { get; init; } = Array.Empty(); + + [JsonPropertyName("products")] + public IReadOnlyCollection Products { get; init; } = Array.Empty(); + + [JsonPropertyName("references")] + public IReadOnlyCollection References { get; init; } = Array.Empty(); + + [JsonPropertyName("attachments")] + public IReadOnlyCollection Attachments { get; init; } = Array.Empty(); + + [JsonPropertyName("mitigations")] + public IReadOnlyCollection Mitigations { get; init; } = Array.Empty(); + + [JsonPropertyName("detailHtml")] + public string? DetailHtml { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs new file mode 100644 index 00000000..0518d058 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs @@ -0,0 +1,12 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +public sealed record IcsCisaAttachmentDto +{ + [JsonPropertyName("title")] + public string? Title { get; init; } + + [JsonPropertyName("url")] + public required string Url { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaCursor.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaCursor.cs new file mode 100644 index 00000000..88bd1ce4 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaCursor.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +internal sealed record IcsCisaCursor( + DateTimeOffset? LastPublished, + IReadOnlyCollection PendingDocuments, + IReadOnlyCollection PendingMappings) +{ + public static IcsCisaCursor Empty { get; } = new(null, Array.Empty(), Array.Empty()); + + public BsonDocument ToBsonDocument() + { + var document = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(static id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(static id => id.ToString())), + }; + + if (LastPublished.HasValue) + { + document["lastPublished"] = LastPublished.Value.UtcDateTime; + } + + return document; + } + + public static IcsCisaCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + var lastPublished = document.TryGetValue("lastPublished", out var publishedValue) + ? ParseDate(publishedValue) + : null; + + return new IcsCisaCursor( + lastPublished, + ReadGuidArray(document, "pendingDocuments"), + ReadGuidArray(document, "pendingMappings")); + } + + public IcsCisaCursor WithPendingDocuments(IEnumerable ids) + => this with { PendingDocuments = ids?.Distinct().ToArray() ?? Array.Empty() }; + + public IcsCisaCursor WithPendingMappings(IEnumerable ids) + => this with { PendingMappings = ids?.Distinct().ToArray() ?? Array.Empty() }; + + public IcsCisaCursor WithLastPublished(DateTimeOffset? published) + => this with { LastPublished = published?.ToUniversalTime() }; + + private static DateTimeOffset? 
ParseDate(BsonValue value) + => value.BsonType switch + { + BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc), + BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => null, + }; + + private static IReadOnlyCollection ReadGuidArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return Array.Empty(); + } + + var results = new List(array.Count); + foreach (var element in array) + { + if (element is null) + { + continue; + } + + if (Guid.TryParse(element.ToString(), out var guid)) + { + results.Add(guid); + } + } + + return results; + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaDiagnostics.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaDiagnostics.cs new file mode 100644 index 00000000..c81cde4a --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaDiagnostics.cs @@ -0,0 +1,171 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +public sealed class IcsCisaDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.Ics.Cisa"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + + private readonly Counter _fetchAttempts; + private readonly Counter _fetchSuccess; + private readonly Counter _fetchFailures; + private readonly Counter _fetchNotModified; + private readonly Counter _fetchFallbacks; + private readonly Histogram _fetchDocuments; + + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Histogram _parseAdvisoryCount; + private readonly Histogram _parseAttachmentCount; + private readonly Histogram _parseDetailCount; + + private readonly Counter _detailSuccess; + private readonly Counter _detailFailures; + + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + private readonly Histogram _mapReferenceCount; + private readonly Histogram _mapPackageCount; + private readonly Histogram _mapAliasCount; + + public IcsCisaDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + + _fetchAttempts = _meter.CreateCounter("icscisa.fetch.attempts", unit: "operations"); + _fetchSuccess = _meter.CreateCounter("icscisa.fetch.success", unit: "operations"); + _fetchFailures = _meter.CreateCounter("icscisa.fetch.failures", unit: "operations"); + _fetchNotModified = _meter.CreateCounter("icscisa.fetch.not_modified", unit: "operations"); + _fetchFallbacks = _meter.CreateCounter("icscisa.fetch.fallbacks", unit: "operations"); + _fetchDocuments = _meter.CreateHistogram("icscisa.fetch.documents", unit: "documents"); + + _parseSuccess = _meter.CreateCounter("icscisa.parse.success", unit: "documents"); + _parseFailures = _meter.CreateCounter("icscisa.parse.failures", unit: "documents"); + _parseAdvisoryCount = _meter.CreateHistogram("icscisa.parse.advisories", unit: "advisories"); + _parseAttachmentCount = _meter.CreateHistogram("icscisa.parse.attachments", unit: "attachments"); + _parseDetailCount = _meter.CreateHistogram("icscisa.parse.detail_fetches", unit: "fetches"); + + _detailSuccess = _meter.CreateCounter("icscisa.detail.success", unit: "operations"); + _detailFailures = _meter.CreateCounter("icscisa.detail.failures", unit: "operations"); + + _mapSuccess = _meter.CreateCounter("icscisa.map.success", unit: "advisories"); + _mapFailures = 
_meter.CreateCounter("icscisa.map.failures", unit: "advisories"); + _mapReferenceCount = _meter.CreateHistogram("icscisa.map.references", unit: "references"); + _mapPackageCount = _meter.CreateHistogram("icscisa.map.packages", unit: "packages"); + _mapAliasCount = _meter.CreateHistogram("icscisa.map.aliases", unit: "aliases"); + } + + public void FetchAttempt(string topicId) + { + _fetchAttempts.Add(1, BuildTopicTags(topicId)); + } + + public void FetchSuccess(string topicId, int documentsAdded) + { + var tags = BuildTopicTags(topicId); + _fetchSuccess.Add(1, tags); + if (documentsAdded > 0) + { + _fetchDocuments.Record(documentsAdded, tags); + } + } + + public void FetchNotModified(string topicId) + { + _fetchNotModified.Add(1, BuildTopicTags(topicId)); + } + + public void FetchFallback(string topicId) + { + _fetchFallbacks.Add(1, BuildTopicTags(topicId)); + } + + public void FetchFailure(string topicId) + { + _fetchFailures.Add(1, BuildTopicTags(topicId)); + } + + public void ParseSuccess(string topicId, int advisoryCount, int attachmentCount, int detailFetchCount) + { + var tags = BuildTopicTags(topicId); + _parseSuccess.Add(1, tags); + if (advisoryCount >= 0) + { + _parseAdvisoryCount.Record(advisoryCount, tags); + } + + if (attachmentCount >= 0) + { + _parseAttachmentCount.Record(attachmentCount, tags); + } + + if (detailFetchCount >= 0) + { + _parseDetailCount.Record(detailFetchCount, tags); + } + } + + public void ParseFailure(string topicId) + { + _parseFailures.Add(1, BuildTopicTags(topicId)); + } + + public void DetailFetchSuccess(string advisoryId) + { + _detailSuccess.Add(1, BuildAdvisoryTags(advisoryId)); + } + + public void DetailFetchFailure(string advisoryId) + { + _detailFailures.Add(1, BuildAdvisoryTags(advisoryId)); + } + + public void MapSuccess(string advisoryId, int referenceCount, int packageCount, int aliasCount) + { + var tags = BuildAdvisoryTags(advisoryId); + _mapSuccess.Add(1, tags); + if (referenceCount >= 0) + { + _mapReferenceCount.Record(referenceCount, tags); + } + + if (packageCount >= 0) + { + _mapPackageCount.Record(packageCount, tags); + } + + if (aliasCount >= 0) + { + _mapAliasCount.Record(aliasCount, tags); + } + } + + public void MapFailure(string advisoryId) + { + _mapFailures.Add(1, BuildAdvisoryTags(advisoryId)); + } + + private static KeyValuePair[] BuildTopicTags(string? topicId) + => new[] + { + new KeyValuePair("feedser.source", IcsCisaConnectorPlugin.SourceName), + new KeyValuePair("icscisa.topic", string.IsNullOrWhiteSpace(topicId) ? "unknown" : topicId) + }; + + private static KeyValuePair[] BuildAdvisoryTags(string? advisoryId) + => new[] + { + new KeyValuePair("feedser.source", IcsCisaConnectorPlugin.SourceName), + new KeyValuePair("icscisa.advisory", string.IsNullOrWhiteSpace(advisoryId) ? 
"unknown" : advisoryId) + }; + + public void Dispose() + { + _meter.Dispose(); + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedDto.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedDto.cs new file mode 100644 index 00000000..6c2c82af --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedDto.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +public sealed record IcsCisaFeedDto +{ + [JsonPropertyName("topicId")] + public required string TopicId { get; init; } + + [JsonPropertyName("feedUri")] + public required string FeedUri { get; init; } + + [JsonPropertyName("advisories")] + public IReadOnlyCollection Advisories { get; init; } = new List(); +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedParser.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedParser.cs new file mode 100644 index 00000000..2bef37d6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Internal/IcsCisaFeedParser.cs @@ -0,0 +1,402 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.ServiceModel.Syndication; +using System.Text; +using System.Text.RegularExpressions; +using System.Xml; +using AngleSharp.Html.Parser; +using AngleSharp.Html.Dom; +using StellaOps.Feedser.Source.Common.Html; + +namespace StellaOps.Feedser.Source.Ics.Cisa.Internal; + +public sealed class IcsCisaFeedParser +{ + private static readonly Regex AdvisoryIdRegex = new(@"^(?ICS[AM]?A?-?\d{2}-\d{3}[A-Z]?(-\d{2})?)", RegexOptions.IgnoreCase | RegexOptions.Compiled); + private static readonly Regex CveRegex = new(@"CVE-\d{4}-\d{4,}", RegexOptions.IgnoreCase | RegexOptions.Compiled); + + private readonly HtmlContentSanitizer _sanitizer = new(); + private readonly HtmlParser _htmlParser = new(); + + public IReadOnlyCollection Parse(Stream rssStream, bool isMedicalTopic, Uri? topicUri) + { + if (rssStream is null) + { + return Array.Empty(); + } + + using var reader = XmlReader.Create(rssStream, new XmlReaderSettings + { + DtdProcessing = DtdProcessing.Ignore, + IgnoreComments = true, + IgnoreProcessingInstructions = true, + }); + + var feed = SyndicationFeed.Load(reader); + if (feed is null || feed.Items is null) + { + return Array.Empty(); + } + + var advisories = new List(); + foreach (var item in feed.Items) + { + var dto = ConvertItem(item, isMedicalTopic, topicUri); + if (dto is not null) + { + advisories.Add(dto); + } + } + + return advisories; + } + + private IcsCisaAdvisoryDto? ConvertItem(SyndicationItem item, bool isMedicalTopic, Uri? 
topicUri) + { + if (item is null) + { + return null; + } + + var title = item.Title?.Text?.Trim(); + if (string.IsNullOrWhiteSpace(title)) + { + return null; + } + + var advisoryId = ExtractAdvisoryId(title); + if (string.IsNullOrWhiteSpace(advisoryId)) + { + return null; + } + + var linkUri = item.Links.FirstOrDefault()?.Uri; + if (linkUri is null && !string.IsNullOrWhiteSpace(item.Id) && Uri.TryCreate(item.Id, UriKind.Absolute, out var fallback)) + { + linkUri = fallback; + } + + if (linkUri is null) + { + return null; + } + + var contentHtml = ExtractContentHtml(item); + var sanitizedHtml = _sanitizer.Sanitize(contentHtml, linkUri); + var textContent = ExtractTextContent(sanitizedHtml); + + var aliases = new HashSet(StringComparer.OrdinalIgnoreCase) { advisoryId }; + var cveIds = ExtractCveIds(textContent, aliases); + var vendors = ExtractList(sanitizedHtml, textContent, "Vendor"); + var products = ExtractList(sanitizedHtml, textContent, "Products"); + if (products.Count == 0) + { + products = ExtractList(sanitizedHtml, textContent, "Product"); + } + var attachments = ExtractAttachments(sanitizedHtml, linkUri); + var references = ExtractReferences(sanitizedHtml, linkUri); + + var published = item.PublishDate != DateTimeOffset.MinValue + ? item.PublishDate.ToUniversalTime() + : item.LastUpdatedTime.ToUniversalTime(); + + var updated = item.LastUpdatedTime != DateTimeOffset.MinValue + ? item.LastUpdatedTime.ToUniversalTime() + : (DateTimeOffset?)null; + + return new IcsCisaAdvisoryDto + { + AdvisoryId = advisoryId, + Title = title, + Link = linkUri.ToString(), + Summary = item.Summary?.Text?.Trim(), + DescriptionHtml = sanitizedHtml, + Published = published, + Updated = updated, + IsMedical = isMedicalTopic || advisoryId.StartsWith("ICSMA", StringComparison.OrdinalIgnoreCase), + Aliases = aliases.ToArray(), + CveIds = cveIds, + Vendors = vendors, + Products = products, + References = references, + Attachments = attachments, + }; + } + + private static string ExtractAdvisoryId(string title) + { + if (string.IsNullOrWhiteSpace(title)) + { + return string.Empty; + } + + var colonIndex = title.IndexOf(':'); + var candidate = colonIndex > 0 ? title[..colonIndex] : title; + var match = AdvisoryIdRegex.Match(candidate); + if (match.Success) + { + var id = match.Groups["id"].Value.Trim(); + return id.ToUpperInvariant(); + } + + return candidate.Trim(); + } + + private static string ExtractContentHtml(SyndicationItem item) + { + if (item.Content is TextSyndicationContent textContent) + { + return textContent.Text ?? string.Empty; + } + + if (item.Summary is not null) + { + return item.Summary.Text ?? 
string.Empty; + } + + if (item.ElementExtensions is not null) + { + foreach (var extension in item.ElementExtensions) + { + try + { + var value = extension.GetObject(); + if (!string.IsNullOrWhiteSpace(value)) + { + return value; + } + } + catch + { + // ignore malformed extensions + } + } + } + + return string.Empty; + } + + private static IReadOnlyCollection ExtractCveIds(string text, HashSet aliases) + { + if (string.IsNullOrWhiteSpace(text)) + { + return Array.Empty(); + } + + var matches = CveRegex.Matches(text); + if (matches.Count == 0) + { + return Array.Empty(); + } + + var ids = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (Match match in matches) + { + if (!match.Success) + { + continue; + } + + var value = match.Value.ToUpperInvariant(); + if (ids.Add(value)) + { + aliases.Add(value); + } + } + + return ids.ToArray(); + } + + private IReadOnlyCollection ExtractList(string sanitizedHtml, string textContent, string key) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return Array.Empty(); + } + + var items = new HashSet(StringComparer.OrdinalIgnoreCase); + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + foreach (var element in document.All) + { + if (element is IHtmlParagraphElement or IHtmlDivElement or IHtmlSpanElement or IHtmlListItemElement) + { + var content = element.TextContent?.Trim(); + if (string.IsNullOrWhiteSpace(content)) + { + continue; + } + + if (content.StartsWith($"{key}:", StringComparison.OrdinalIgnoreCase)) + { + var line = content[(key.Length + 1)..].Trim(); + foreach (var part in line.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries)) + { + var value = part.Trim(); + if (!string.IsNullOrWhiteSpace(value)) + { + items.Add(value); + } + } + } + } + } + } + catch + { + // ignore HTML parsing failures; fallback to text processing below + } + + if (items.Count == 0 && !string.IsNullOrWhiteSpace(textContent)) + { + using var reader = new StringReader(textContent); + string? line; + while ((line = reader.ReadLine()) is not null) + { + if (line.StartsWith($"{key}:", StringComparison.OrdinalIgnoreCase)) + { + var raw = line[(key.Length + 1)..].Trim(); + foreach (var part in raw.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries)) + { + var value = part.Trim(); + if (!string.IsNullOrWhiteSpace(value)) + { + items.Add(value); + } + } + } + } + } + + return items.ToArray(); + } + + private IReadOnlyCollection ExtractAttachments(string sanitizedHtml, Uri linkUri) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return Array.Empty(); + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var attachments = new List(); + + foreach (var anchor in document.QuerySelectorAll("a")) + { + var href = anchor.GetAttribute("href"); + if (string.IsNullOrWhiteSpace(href)) + { + continue; + } + + if (!Uri.TryCreate(linkUri, href, out var resolved)) + { + continue; + } + + var url = resolved.ToString(); + if (!url.EndsWith(".pdf", StringComparison.OrdinalIgnoreCase) && + !url.Contains("/pdf", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + attachments.Add(new IcsCisaAttachmentDto + { + Title = anchor.TextContent?.Trim(), + Url = url, + }); + } + + return attachments.Count == 0 ? 
Array.Empty() : attachments; + } + catch + { + return Array.Empty(); + } + } + + private IReadOnlyCollection ExtractReferences(string sanitizedHtml, Uri linkUri) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return new[] { linkUri.ToString() }; + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var links = new HashSet(StringComparer.OrdinalIgnoreCase) + { + linkUri.ToString() + }; + + foreach (var anchor in document.QuerySelectorAll("a")) + { + var href = anchor.GetAttribute("href"); + if (string.IsNullOrWhiteSpace(href)) + { + continue; + } + + if (Uri.TryCreate(linkUri, href, out var resolved)) + { + links.Add(resolved.ToString()); + } + } + + return links.ToArray(); + } + catch + { + return new[] { linkUri.ToString() }; + } + } + + private string ExtractTextContent(string sanitizedHtml) + { + if (string.IsNullOrWhiteSpace(sanitizedHtml)) + { + return string.Empty; + } + + try + { + var document = _htmlParser.ParseDocument(sanitizedHtml); + var builder = new StringBuilder(); + var body = document.Body ?? document.DocumentElement; + if (body is null) + { + return string.Empty; + } + + foreach (var node in body.ChildNodes) + { + var text = node.TextContent; + if (string.IsNullOrWhiteSpace(text)) + { + continue; + } + + if (builder.Length > 0) + { + builder.AppendLine(); + } + + builder.Append(text.Trim()); + } + + return builder.ToString(); + } + catch + { + return sanitizedHtml; + } + } +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/Jobs.cs b/src/StellaOps.Feedser.Source.Ics.Cisa/Jobs.cs new file mode 100644 index 00000000..44a1f005 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/Jobs.cs @@ -0,0 +1,46 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Ics.Cisa; + +internal static class IcsCisaJobKinds +{ + public const string Fetch = "source:ics-cisa:fetch"; + public const string Parse = "source:ics-cisa:parse"; + public const string Map = "source:ics-cisa:map"; +} + +internal sealed class IcsCisaFetchJob : IJob +{ + private readonly IcsCisaConnector _connector; + + public IcsCisaFetchJob(IcsCisaConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} + +internal sealed class IcsCisaParseJob : IJob +{ + private readonly IcsCisaConnector _connector; + + public IcsCisaParseJob(IcsCisaConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.ParseAsync(context.Services, cancellationToken); +} + +internal sealed class IcsCisaMapJob : IJob +{ + private readonly IcsCisaConnector _connector; + + public IcsCisaMapJob(IcsCisaConnector connector) + => _connector = connector ?? 
throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.MapAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/StellaOps.Feedser.Source.Ics.Cisa.csproj b/src/StellaOps.Feedser.Source.Ics.Cisa/StellaOps.Feedser.Source.Ics.Cisa.csproj index f7f2c154..8da045aa 100644 --- a/src/StellaOps.Feedser.Source.Ics.Cisa/StellaOps.Feedser.Source.Ics.Cisa.csproj +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/StellaOps.Feedser.Source.Ics.Cisa.csproj @@ -1,16 +1,28 @@ - - - - net10.0 - enable - enable - - - - - - - - - - + + + + net10.0 + enable + enable + + + + + + + + + + + + + + + + + + <_Parameter1>StellaOps.Feedser.Source.Ics.Cisa.Tests + + + + diff --git a/src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md b/src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md index 94d968e7..747a6169 100644 --- a/src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md +++ b/src/StellaOps.Feedser.Source.Ics.Cisa/TASKS.md @@ -2,11 +2,13 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-ICSCISA-02-001 Document CISA ICS feed contract|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – `https://www.cisa.gov/cybersecurity-advisories/ics-advisories.xml` and legacy `/sites/default/files/feeds/...` return Akamai 403 even with browser UA; HTML landing page blocked as well. Logged full headers (x-reference-error, AkamaiGHost) in `docs/feedser-connector-research-20251011.md` and initiated GovDelivery access request.| -|FEEDCONN-ICSCISA-02-002 Fetch pipeline & cursor storage|BE-Conn-ICS-CISA|Source.Common, Storage.Mongo|**TODO** – Fetcher must support GovDelivery RSS (once credentials provided) and fallback HTML scrape. Persist raw documents, dedupe by advisory slug (`ICSA-YY-NNN`), and store cursor on `Last Updated` date. Implement Akamai-aware retry/backoff with optional proxy. _(2025-10-12: awaiting Ops for GovDelivery credentials; re-test handshake once token arrives.)_ **Coordination:** Ops security to obtain/share GovDelivery token + rotation SOP; Source.Common to review proxy/backoff config once credentials land.| -|FEEDCONN-ICSCISA-02-003 DTO/parser implementation|BE-Conn-ICS-CISA|Source.Common|**TODO** – Create DTOs parsing ICS advisories, extract vendors, products, mitigation steps, references, CVEs.| -|FEEDCONN-ICSCISA-02-004 Canonical mapping & range primitives|BE-Conn-ICS-CISA|Models|**TODO** – Map advisories into canonical records with aliases, references, affected ICS vendor packages, and range primitives. Reference normalized rule expectations in `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: plan for payload `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"ics-cisa:ICSA-YY-NNN"}]`; if advisories use build numbers, capture them as `notes` and flag potential new scheme early.| -|FEEDCONN-ICSCISA-02-005 Deterministic fixtures/tests|QA|Testing|**TODO** – Provide fetch/parse/map regression tests; support `UPDATE_ICS_CISA_FIXTURES=1`.| -|FEEDCONN-ICSCISA-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics, update module documentation, and close backlog when production ready.| -|FEEDCONN-ICSCISA-02-007 Detail document inventory|BE-Conn-ICS-CISA|Research|**TODO** – Once access available, capture HTML + PDF attachments from ICS advisories, identify consistent DOM anchors for affected products, and checklist for downloading supplementary ICS alert PDFs.| +|FEEDCONN-ICSCISA-02-002 Fetch pipeline & cursor storage|BE-Conn-ICS-CISA|Source.Common, Storage.Mongo|**DONE (2025-10-16)** – Confirmed proxy knobs + cursor state behave with the refreshed fixtures; ops runbook now captures proxy usage/validation so the fetch stage is production-ready.| +|FEEDCONN-ICSCISA-02-003 DTO/parser implementation|BE-Conn-ICS-CISA|Source.Common|**DONE (2025-10-16)** – Feed parser fixtures updated to retain vendor PDFs as attachments while maintaining reference coverage; console diagnostics removed.| +|FEEDCONN-ICSCISA-02-004 Canonical mapping & range primitives|BE-Conn-ICS-CISA|Models|**DONE (2025-10-16)** – `TryCreateSemVerPrimitive` flow + Mongo deserialiser now persist `exactValue` (`4.2` → `4.2.0`), unblocking canonical snapshots.| +|FEEDCONN-ICSCISA-02-005 Deterministic fixtures/tests|QA|Testing|**DONE (2025-10-16)** – `dotnet test src/StellaOps.Feedser.Source.Ics.Cisa.Tests/...` passes; fixtures assert attachment handling + SemVer semantics.| +|FEEDCONN-ICSCISA-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-16)** – Ops guide documents attachment checks, SemVer exact values, and proxy guidance; diagnostics remain unchanged.| +|FEEDCONN-ICSCISA-02-007 Detail document inventory|BE-Conn-ICS-CISA|Research|**DONE (2025-10-16)** – Validated canned detail pages vs feed output so attachment inventories stay aligned; archived expectations noted in `HANDOVER.md`.| |FEEDCONN-ICSCISA-02-008 Distribution fallback strategy|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – Outlined GovDelivery token request, HTML scrape + email digest fallback, and dependency on Ops for credential workflow; awaiting decision before fetch implementation.| -|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**TODO** – Coordinate with CISA GovDelivery program for API/email credentials, document rotation/renewal steps, and surface secrets management guidance for Offline Kit.| +|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** – GovDelivery onboarding runbook captured in `docs/ops/feedser-icscisa-operations.md`; secret vault path and Offline Kit handling documented.| +|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** – Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).| +|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** – Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.| diff --git 
a/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-detail.json b/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-detail.json new file mode 100644 index 00000000..6f466b0e --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-detail.json @@ -0,0 +1,25 @@ +{ + "idx": "5868", + "title": "태그프리 제품 부적절한 권한 검증 취약점", + "summary": "태그프리사의 X-Free Uploader에서 발생하는 부적절한 권한 검증 취약점", + "contentHtml": "

태그프리사의 X-Free Uploader에서 권한 검증이 미흡하여 임의 파일 삭제가 가능합니다.
    ", + "severity": "High", + "published": "2025-07-31T06:30:23Z", + "updated": "2025-08-01T02:15:00Z", + "cveIds": [ + "CVE-2025-29866" + ], + "references": [ + { + "url": "https://www.tagfree.com/security", + "label": "제조사 공지" + } + ], + "products": [ + { + "vendor": "태그프리", + "name": "X-Free Uploader", + "versions": "XFU 1.0.1.0084 ~ 2.0.1.0034" + } + ] +} diff --git a/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-feed.xml b/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-feed.xml new file mode 100644 index 00000000..78494b8d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa.Tests/Fixtures/kisa-feed.xml @@ -0,0 +1,15 @@ + + + + KNVD 보안취약점 + https://knvd.krcert.or.kr/ + 테스트 피드 + ko + + 태그프리 제품 부적절한 권한 검증 취약점 + https://knvd.krcert.or.kr/detailDos.do?IDX=5868 + 취약점정보 + Thu, 31 Jul 2025 06:30:23 GMT + + + diff --git a/src/StellaOps.Feedser.Source.Kisa.Tests/KisaConnectorTests.cs b/src/StellaOps.Feedser.Source.Kisa.Tests/KisaConnectorTests.cs new file mode 100644 index 00000000..5683ef86 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa.Tests/KisaConnectorTests.cs @@ -0,0 +1,213 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Source.Kisa.Configuration; +using StellaOps.Feedser.Source.Kisa.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Feedser.Testing; +using Xunit; +using System.Linq; + +namespace StellaOps.Feedser.Source.Kisa.Tests; + +[Collection("mongo-fixture")] +public sealed class KisaConnectorTests : IAsyncLifetime +{ + private static readonly Uri FeedUri = new("https://test.local/rss/securityInfo.do"); + private static readonly Uri DetailApiUri = new("https://test.local/rssDetailData.do?IDX=5868"); + private static readonly Uri DetailPageUri = new("https://test.local/detailDos.do?IDX=5868"); + + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler; + + public KisaConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + _handler = new CannedHttpMessageHandler(); + } + + [Fact] + public async Task FetchParseMap_ProducesCanonicalAdvisory() + { + await using var provider = await BuildServiceProviderAsync(); + SeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(5, CancellationToken.None); + advisories.Should().HaveCount(1); + + var advisory = advisories[0]; + advisory.AdvisoryKey.Should().Be("5868"); + advisory.Language.Should().Be("ko"); + advisory.Aliases.Should().Contain("CVE-2025-29866"); + advisory.AffectedPackages.Should().Contain(package => 
package.Identifier.Contains("태그프리")); + advisory.References.Should().Contain(reference => reference.Url == DetailPageUri.ToString()); + + var stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(KisaConnectorPlugin.SourceName, CancellationToken.None); + state.Should().NotBeNull(); + state!.Cursor.Should().NotBeNull(); + state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue(); + pendingDocs!.AsBsonArray.Should().BeEmpty(); + state.Cursor.TryGetValue("pendingMappings", out var pendingMappings).Should().BeTrue(); + pendingMappings!.AsBsonArray.Should().BeEmpty(); + } + + [Fact] + public async Task Telemetry_RecordsMetrics() + { + await using var provider = await BuildServiceProviderAsync(); + SeedResponses(); + + using var metrics = new KisaMetricCollector(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + Sum(metrics.Measurements, "kisa.feed.success").Should().Be(1); + Sum(metrics.Measurements, "kisa.feed.items").Should().BeGreaterThan(0); + Sum(metrics.Measurements, "kisa.detail.success").Should().Be(1); + Sum(metrics.Measurements, "kisa.detail.failures").Should().Be(0); + Sum(metrics.Measurements, "kisa.parse.success").Should().Be(1); + Sum(metrics.Measurements, "kisa.parse.failures").Should().Be(0); + Sum(metrics.Measurements, "kisa.map.success").Should().Be(1); + Sum(metrics.Measurements, "kisa.map.failures").Should().Be(0); + } + + private async Task BuildServiceProviderAsync() + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSourceCommon(); + services.AddKisaConnector(options => + { + options.FeedUri = FeedUri; + options.DetailApiUri = new Uri("https://test.local/rssDetailData.do"); + options.DetailPageUri = new Uri("https://test.local/detailDos.do"); + options.RequestDelay = TimeSpan.Zero; + options.MaxAdvisoriesPerFetch = 10; + options.MaxKnownAdvisories = 32; + }); + + services.Configure(KisaOptions.HttpClientName, builderOptions => + { + builderOptions.HttpMessageHandlerBuilderActions.Add(builder => + { + builder.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void SeedResponses() + { + AddXmlResponse(FeedUri, ReadFixture("kisa-feed.xml")); + AddJsonResponse(DetailApiUri, ReadFixture("kisa-detail.json")); + } + + private void AddXmlResponse(Uri uri, string xml) + { + _handler.AddResponse(uri, () => new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(xml, Encoding.UTF8, "application/rss+xml"), + }); + } + + private void AddJsonResponse(Uri uri, string json) + { + _handler.AddResponse(uri, () => new HttpResponseMessage(System.Net.HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, 
"application/json"), + }); + } + + private static string ReadFixture(string fileName) + => System.IO.File.ReadAllText(System.IO.Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName)); + + private static long Sum(IEnumerable measurements, string name) + => measurements.Where(m => m.Name == name).Sum(m => m.Value); + + private sealed class KisaMetricCollector : IDisposable + { + private readonly MeterListener _listener; + private readonly ConcurrentBag _measurements = new(); + + public KisaMetricCollector() + { + _listener = new MeterListener + { + InstrumentPublished = (instrument, listener) => + { + if (instrument.Meter.Name == KisaDiagnostics.MeterName) + { + listener.EnableMeasurementEvents(instrument); + } + }, + }; + + _listener.SetMeasurementEventCallback((instrument, measurement, tags, state) => + { + var tagList = new List>(tags.Length); + foreach (var tag in tags) + { + tagList.Add(tag); + } + + _measurements.Add(new MetricMeasurement(instrument.Name, measurement, tagList)); + }); + + _listener.Start(); + } + + public IReadOnlyCollection Measurements => _measurements; + + public void Dispose() => _listener.Dispose(); + + internal sealed record MetricMeasurement(string Name, long Value, IReadOnlyList> Tags); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; +} diff --git a/src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj b/src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj new file mode 100644 index 00000000..9bbb9390 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa.Tests/StellaOps.Feedser.Source.Kisa.Tests.csproj @@ -0,0 +1,24 @@ + + + net10.0 + enable + enable + + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + + + diff --git a/src/StellaOps.Feedser.Source.Kisa/Class1.cs b/src/StellaOps.Feedser.Source.Kisa/Class1.cs deleted file mode 100644 index f8db6a87..00000000 --- a/src/StellaOps.Feedser.Source.Kisa/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.Kisa; - -public sealed class KisaConnectorPlugin : IConnectorPlugin -{ - public string Name => "kisa"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.Kisa/Configuration/KisaOptions.cs b/src/StellaOps.Feedser.Source.Kisa/Configuration/KisaOptions.cs new file mode 100644 index 00000000..c2da1e3a --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Configuration/KisaOptions.cs @@ -0,0 +1,97 @@ +using System; + +namespace StellaOps.Feedser.Source.Kisa.Configuration; + +public sealed class KisaOptions +{ + public const string HttpClientName = "feedser.source.kisa"; + + /// + /// Primary RSS feed for security advisories. 
+ /// + public Uri FeedUri { get; set; } = new("https://knvd.krcert.or.kr/rss/securityInfo.do"); + + /// + /// Detail API endpoint template; `IDX` query parameter identifies the advisory. + /// + public Uri DetailApiUri { get; set; } = new("https://knvd.krcert.or.kr/rssDetailData.do"); + + /// + /// Optional HTML detail URI template for provenance. + /// + public Uri DetailPageUri { get; set; } = new("https://knvd.krcert.or.kr/detailDos.do"); + + public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); + + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5); + + public int MaxAdvisoriesPerFetch { get; set; } = 20; + + public int MaxKnownAdvisories { get; set; } = 256; + + public void Validate() + { + if (FeedUri is null || !FeedUri.IsAbsoluteUri) + { + throw new InvalidOperationException("KISA feed URI must be an absolute URI."); + } + + if (DetailApiUri is null || !DetailApiUri.IsAbsoluteUri) + { + throw new InvalidOperationException("KISA detail API URI must be an absolute URI."); + } + + if (DetailPageUri is null || !DetailPageUri.IsAbsoluteUri) + { + throw new InvalidOperationException("KISA detail page URI must be an absolute URI."); + } + + if (RequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("RequestTimeout must be positive."); + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("RequestDelay cannot be negative."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException("FailureBackoff must be positive."); + } + + if (MaxAdvisoriesPerFetch <= 0) + { + throw new InvalidOperationException("MaxAdvisoriesPerFetch must be greater than zero."); + } + + if (MaxKnownAdvisories <= 0) + { + throw new InvalidOperationException("MaxKnownAdvisories must be greater than zero."); + } + } + + public Uri BuildDetailApiUri(string idx) + { + if (string.IsNullOrWhiteSpace(idx)) + { + throw new ArgumentException("IDX must not be empty", nameof(idx)); + } + + var builder = new UriBuilder(DetailApiUri); + var queryPrefix = string.IsNullOrEmpty(builder.Query) ? string.Empty : builder.Query.TrimStart('?') + "&"; + builder.Query = $"{queryPrefix}IDX={Uri.EscapeDataString(idx)}"; + return builder.Uri; + } + + public Uri BuildDetailPageUri(string idx) + { + var builder = new UriBuilder(DetailPageUri); + var queryPrefix = string.IsNullOrEmpty(builder.Query) ? string.Empty : builder.Query.TrimStart('?') + "&"; + builder.Query = $"{queryPrefix}IDX={Uri.EscapeDataString(idx)}"; + return builder.Uri; + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaCursor.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaCursor.cs new file mode 100644 index 00000000..30c31adb --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaCursor.cs @@ -0,0 +1,120 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.Kisa.Internal; + +internal sealed record KisaCursor( + IReadOnlyCollection PendingDocuments, + IReadOnlyCollection PendingMappings, + IReadOnlyCollection KnownIds, + DateTimeOffset? LastPublished, + DateTimeOffset? 
LastFetchAt) +{ + private static readonly IReadOnlyCollection EmptyGuids = Array.Empty(); + private static readonly IReadOnlyCollection EmptyStrings = Array.Empty(); + + public static KisaCursor Empty { get; } = new(EmptyGuids, EmptyGuids, EmptyStrings, null, null); + + public KisaCursor WithPendingDocuments(IEnumerable documents) + => this with { PendingDocuments = Distinct(documents) }; + + public KisaCursor WithPendingMappings(IEnumerable mappings) + => this with { PendingMappings = Distinct(mappings) }; + + public KisaCursor WithKnownIds(IEnumerable ids) + => this with { KnownIds = ids?.Distinct(StringComparer.OrdinalIgnoreCase).ToArray() ?? EmptyStrings }; + + public KisaCursor WithLastPublished(DateTimeOffset? published) + => this with { LastPublished = published }; + + public KisaCursor WithLastFetch(DateTimeOffset? timestamp) + => this with { LastFetchAt = timestamp }; + + public BsonDocument ToBsonDocument() + { + var document = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())), + ["knownIds"] = new BsonArray(KnownIds), + }; + + if (LastPublished.HasValue) + { + document["lastPublished"] = LastPublished.Value.UtcDateTime; + } + + if (LastFetchAt.HasValue) + { + document["lastFetchAt"] = LastFetchAt.Value.UtcDateTime; + } + + return document; + } + + public static KisaCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + var pendingDocuments = ReadGuidArray(document, "pendingDocuments"); + var pendingMappings = ReadGuidArray(document, "pendingMappings"); + var knownIds = ReadStringArray(document, "knownIds"); + var lastPublished = document.TryGetValue("lastPublished", out var publishedValue) + ? ParseDate(publishedValue) + : null; + var lastFetch = document.TryGetValue("lastFetchAt", out var fetchValue) + ? ParseDate(fetchValue) + : null; + + return new KisaCursor(pendingDocuments, pendingMappings, knownIds, lastPublished, lastFetch); + } + + private static IReadOnlyCollection Distinct(IEnumerable? values) + => values?.Distinct().ToArray() ?? EmptyGuids; + + private static IReadOnlyCollection ReadGuidArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyGuids; + } + + var items = new List(array.Count); + foreach (var element in array) + { + if (Guid.TryParse(element?.ToString(), out var id)) + { + items.Add(id); + } + } + + return items; + } + + private static IReadOnlyCollection ReadStringArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyStrings; + } + + return array + .Select(element => element?.ToString() ?? string.Empty) + .Where(static s => !string.IsNullOrWhiteSpace(s)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static DateTimeOffset? 
ParseDate(BsonValue value)
+        => value.BsonType switch
+        {
+            BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
+            BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
+            _ => null,
+        };
+}
diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailParser.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailParser.cs
new file mode 100644
index 00000000..61b0ba23
--- /dev/null
+++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailParser.cs
@@ -0,0 +1,114 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using StellaOps.Feedser.Source.Common.Html;
+
+namespace StellaOps.Feedser.Source.Kisa.Internal;
+
+public sealed class KisaDetailParser
+{
+    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
+    {
+        PropertyNameCaseInsensitive = true,
+        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
+    };
+
+    private readonly HtmlContentSanitizer _sanitizer;
+
+    public KisaDetailParser(HtmlContentSanitizer sanitizer)
+        => _sanitizer = sanitizer ?? throw new ArgumentNullException(nameof(sanitizer));
+
+    public KisaParsedAdvisory Parse(Uri detailApiUri, Uri detailPageUri, byte[] payload)
+    {
+        var response = JsonSerializer.Deserialize<KisaDetailResponse>(payload, SerializerOptions)
+            ?? throw new InvalidOperationException("KISA detail payload deserialized to null");
+
+        var idx = response.Idx ?? throw new InvalidOperationException("KISA detail missing IDX");
+        var contentHtml = _sanitizer.Sanitize(response.ContentHtml ?? string.Empty, detailPageUri);
+
+        return new KisaParsedAdvisory(
+            idx,
+            Normalize(response.Title) ?? idx,
+            Normalize(response.Summary),
+            contentHtml,
+            Normalize(response.Severity),
+            response.Published,
+            response.Updated ?? response.Published,
+            detailApiUri,
+            detailPageUri,
+            NormalizeArray(response.CveIds),
+            MapReferences(response.References),
+            MapProducts(response.Products));
+    }
+
+    private static IReadOnlyList<string> NormalizeArray(string[]? values)
+    {
+        if (values is null || values.Length == 0)
+        {
+            return Array.Empty<string>();
+        }
+
+        return values
+            .Select(Normalize)
+            .Where(static value => !string.IsNullOrWhiteSpace(value))
+            .Distinct(StringComparer.OrdinalIgnoreCase)
+            .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
+            .ToArray()!;
+    }
+
+    private static IReadOnlyList<KisaParsedReference> MapReferences(KisaReferenceDto[]? references)
+    {
+        if (references is null || references.Length == 0)
+        {
+            return Array.Empty<KisaParsedReference>();
+        }
+
+        return references
+            .Where(static reference => !string.IsNullOrWhiteSpace(reference.Url))
+            .Select(reference => new KisaParsedReference(reference.Url!, Normalize(reference.Label)))
+            .DistinctBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase)
+            .ToArray();
+    }
+
+    private static IReadOnlyList<KisaParsedProduct> MapProducts(KisaProductDto[]? products)
+    {
+        if (products is null || products.Length == 0)
+        {
+            return Array.Empty<KisaParsedProduct>();
+        }
+
+        return products
+            .Where(static product => !string.IsNullOrWhiteSpace(product.Vendor) || !string.IsNullOrWhiteSpace(product.Name))
+            .Select(product => new KisaParsedProduct(
+                Normalize(product.Vendor),
+                Normalize(product.Name),
+                Normalize(product.Versions)))
+            .ToArray();
+    }
+
+    private static string? Normalize(string? value)
+        => string.IsNullOrWhiteSpace(value)
+            ?
null + : value.Normalize(NormalizationForm.FormC).Trim(); +} + +public sealed record KisaParsedAdvisory( + string AdvisoryId, + string Title, + string? Summary, + string ContentHtml, + string? Severity, + DateTimeOffset? Published, + DateTimeOffset? Modified, + Uri DetailApiUri, + Uri DetailPageUri, + IReadOnlyList CveIds, + IReadOnlyList References, + IReadOnlyList Products); + +public sealed record KisaParsedReference(string Url, string? Label); + +public sealed record KisaParsedProduct(string? Vendor, string? Name, string? Versions); diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailResponse.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailResponse.cs new file mode 100644 index 00000000..f6f392f9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDetailResponse.cs @@ -0,0 +1,58 @@ +using System; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Kisa.Internal; + +internal sealed class KisaDetailResponse +{ + [JsonPropertyName("idx")] + public string? Idx { get; init; } + + [JsonPropertyName("title")] + public string? Title { get; init; } + + [JsonPropertyName("summary")] + public string? Summary { get; init; } + + [JsonPropertyName("contentHtml")] + public string? ContentHtml { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("published")] + public DateTimeOffset? Published { get; init; } + + [JsonPropertyName("updated")] + public DateTimeOffset? Updated { get; init; } + + [JsonPropertyName("cveIds")] + public string[]? CveIds { get; init; } + + [JsonPropertyName("references")] + public KisaReferenceDto[]? References { get; init; } + + [JsonPropertyName("products")] + public KisaProductDto[]? Products { get; init; } +} + +internal sealed class KisaReferenceDto +{ + [JsonPropertyName("url")] + public string? Url { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } +} + +internal sealed class KisaProductDto +{ + [JsonPropertyName("vendor")] + public string? Vendor { get; init; } + + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("versions")] + public string? 
+}
diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDiagnostics.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDiagnostics.cs
new file mode 100644
index 00000000..ea165857
--- /dev/null
+++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDiagnostics.cs
@@ -0,0 +1,169 @@
+using System.Diagnostics.Metrics;
+
+namespace StellaOps.Feedser.Source.Kisa.Internal;
+
+public sealed class KisaDiagnostics : IDisposable
+{
+    public const string MeterName = "StellaOps.Feedser.Source.Kisa";
+    private const string MeterVersion = "1.0.0";
+
+    private readonly Meter _meter;
+    private readonly Counter<long> _feedAttempts;
+    private readonly Counter<long> _feedSuccess;
+    private readonly Counter<long> _feedFailures;
+    private readonly Counter<long> _feedItems;
+    private readonly Counter<long> _detailAttempts;
+    private readonly Counter<long> _detailSuccess;
+    private readonly Counter<long> _detailUnchanged;
+    private readonly Counter<long> _detailFailures;
+    private readonly Counter<long> _parseAttempts;
+    private readonly Counter<long> _parseSuccess;
+    private readonly Counter<long> _parseFailures;
+    private readonly Counter<long> _mapSuccess;
+    private readonly Counter<long> _mapFailures;
+    private readonly Counter<long> _cursorUpdates;
+
+    public KisaDiagnostics()
+    {
+        _meter = new Meter(MeterName, MeterVersion);
+        _feedAttempts = _meter.CreateCounter<long>(
+            name: "kisa.feed.attempts",
+            unit: "operations",
+            description: "Number of RSS fetch attempts performed for the KISA connector.");
+        _feedSuccess = _meter.CreateCounter<long>(
+            name: "kisa.feed.success",
+            unit: "operations",
+            description: "Number of RSS fetch attempts that completed successfully.");
+        _feedFailures = _meter.CreateCounter<long>(
+            name: "kisa.feed.failures",
+            unit: "operations",
+            description: "Number of RSS fetch attempts that failed.");
+        _feedItems = _meter.CreateCounter<long>(
+            name: "kisa.feed.items",
+            unit: "items",
+            description: "Number of feed items returned by successful RSS fetches.");
+        _detailAttempts = _meter.CreateCounter<long>(
+            name: "kisa.detail.attempts",
+            unit: "documents",
+            description: "Number of advisory detail fetch attempts.");
+        _detailSuccess = _meter.CreateCounter<long>(
+            name: "kisa.detail.success",
+            unit: "documents",
+            description: "Number of advisory detail documents fetched successfully.");
+        _detailUnchanged = _meter.CreateCounter<long>(
+            name: "kisa.detail.unchanged",
+            unit: "documents",
+            description: "Number of advisory detail fetches that returned HTTP 304 (no change).");
+        _detailFailures = _meter.CreateCounter<long>(
+            name: "kisa.detail.failures",
+            unit: "documents",
+            description: "Number of advisory detail fetch attempts that failed.");
+        _parseAttempts = _meter.CreateCounter<long>(
+            name: "kisa.parse.attempts",
+            unit: "documents",
+            description: "Number of advisory documents queued for parsing.");
+        _parseSuccess = _meter.CreateCounter<long>(
+            name: "kisa.parse.success",
+            unit: "documents",
+            description: "Number of advisory documents parsed successfully into DTOs.");
+        _parseFailures = _meter.CreateCounter<long>(
+            name: "kisa.parse.failures",
+            unit: "documents",
+            description: "Number of advisory documents that failed parsing.");
+        _mapSuccess = _meter.CreateCounter<long>(
+            name: "kisa.map.success",
+            unit: "advisories",
+            description: "Number of canonical advisories produced by the mapper.");
+        _mapFailures = _meter.CreateCounter<long>(
+            name: "kisa.map.failures",
+            unit: "advisories",
+            description: "Number of advisories that failed to map.");
+        _cursorUpdates = _meter.CreateCounter<long>(
+            name: "kisa.cursor.updates",
+            unit: "updates",
+            description: "Number of times the published cursor advanced.");
+    }
+
+    public void FeedAttempt() => _feedAttempts.Add(1);
+
+    public void FeedSuccess(int itemCount)
+    {
+        _feedSuccess.Add(1);
+        if (itemCount > 0)
+        {
+            _feedItems.Add(itemCount);
+        }
+    }
+
+    public void FeedFailure(string reason)
+        => _feedFailures.Add(1, GetReasonTags(reason));
+
+    public void DetailAttempt(string? category)
+        => _detailAttempts.Add(1, GetCategoryTags(category));
+
+    public void DetailSuccess(string? category)
+        => _detailSuccess.Add(1, GetCategoryTags(category));
+
+    public void DetailUnchanged(string? category)
+        => _detailUnchanged.Add(1, GetCategoryTags(category));
+
+    public void DetailFailure(string? category, string reason)
+        => _detailFailures.Add(1, GetCategoryReasonTags(category, reason));
+
+    public void ParseAttempt(string? category)
+        => _parseAttempts.Add(1, GetCategoryTags(category));
+
+    public void ParseSuccess(string? category)
+        => _parseSuccess.Add(1, GetCategoryTags(category));
+
+    public void ParseFailure(string? category, string reason)
+        => _parseFailures.Add(1, GetCategoryReasonTags(category, reason));
+
+    public void MapSuccess(string? severity)
+        => _mapSuccess.Add(1, GetSeverityTags(severity));
+
+    public void MapFailure(string? severity, string reason)
+        => _mapFailures.Add(1, GetSeverityReasonTags(severity, reason));
+
+    public void CursorAdvanced()
+        => _cursorUpdates.Add(1);
+
+    public Meter Meter => _meter;
+
+    public void Dispose() => _meter.Dispose();
+
+    private static KeyValuePair<string, object?>[] GetCategoryTags(string? category)
+        => new[]
+        {
+            new KeyValuePair<string, object?>("category", Normalize(category))
+        };
+
+    private static KeyValuePair<string, object?>[] GetCategoryReasonTags(string? category, string reason)
+        => new[]
+        {
+            new KeyValuePair<string, object?>("category", Normalize(category)),
+            new KeyValuePair<string, object?>("reason", Normalize(reason)),
+        };
+
+    private static KeyValuePair<string, object?>[] GetSeverityTags(string? severity)
+        => new[]
+        {
+            new KeyValuePair<string, object?>("severity", Normalize(severity)),
+        };
+
+    private static KeyValuePair<string, object?>[] GetSeverityReasonTags(string? severity, string reason)
+        => new[]
+        {
+            new KeyValuePair<string, object?>("severity", Normalize(severity)),
+            new KeyValuePair<string, object?>("reason", Normalize(reason)),
+        };
+
+    private static KeyValuePair<string, object?>[] GetReasonTags(string reason)
+        => new[]
+        {
+            new KeyValuePair<string, object?>("reason", Normalize(reason)),
+        };
+
+    private static string Normalize(string? value)
+        => string.IsNullOrWhiteSpace(value) ? "unknown" : value!;
+}
diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDocumentMetadata.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDocumentMetadata.cs
new file mode 100644
index 00000000..8ac285d3
--- /dev/null
+++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaDocumentMetadata.cs
@@ -0,0 +1,29 @@
+using System;
+using System.Collections.Generic;
+
+namespace StellaOps.Feedser.Source.Kisa.Internal;
+
+internal static class KisaDocumentMetadata
+{
+    public static Dictionary<string, string> CreateMetadata(KisaFeedItem item)
+    {
+        var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
+        {
+            ["kisa.idx"] = item.AdvisoryId,
+            ["kisa.detailPage"] = item.DetailPageUri.ToString(),
+            ["kisa.published"] = item.Published.ToString("O"),
+        };
+
+        if (!string.IsNullOrWhiteSpace(item.Title))
+        {
+            metadata["kisa.title"] = item.Title!;
+        }
+
+        if (!string.IsNullOrWhiteSpace(item.Category))
+        {
+            metadata["kisa.category"] = item.Category!;
+        }
+
+        return metadata;
+    }
+}
diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedClient.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedClient.cs
new file mode 100644
index 00000000..413b1983
--- /dev/null
+++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedClient.cs
@@ -0,0 +1,116 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using System.Xml.Linq;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using StellaOps.Feedser.Source.Kisa.Configuration;
+
+namespace StellaOps.Feedser.Source.Kisa.Internal;
+
+public sealed class KisaFeedClient
+{
+    private readonly IHttpClientFactory _httpClientFactory;
+    private readonly KisaOptions _options;
+    private readonly ILogger<KisaFeedClient> _logger;
+
+    public KisaFeedClient(
+        IHttpClientFactory httpClientFactory,
+        IOptions<KisaOptions> options,
+        ILogger<KisaFeedClient> logger)
+    {
+        _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
+        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
+        _options.Validate();
+        _logger = logger ??
throw new ArgumentNullException(nameof(logger)); + } + + public async Task> LoadAsync(CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient(KisaOptions.HttpClientName); + + using var request = new HttpRequestMessage(HttpMethod.Get, _options.FeedUri); + request.Headers.TryAddWithoutValidation("Accept", "application/rss+xml, application/xml;q=0.9, text/xml;q=0.8"); + using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var document = XDocument.Load(stream); + + var items = new List(); + foreach (var element in document.Descendants("item")) + { + cancellationToken.ThrowIfCancellationRequested(); + + var link = element.Element("link")?.Value?.Trim(); + if (string.IsNullOrWhiteSpace(link)) + { + continue; + } + + if (!TryExtractIdx(link, out var idx)) + { + continue; + } + + var title = element.Element("title")?.Value?.Trim(); + var category = element.Element("category")?.Value?.Trim(); + var published = ParseDate(element.Element("pubDate")?.Value); + var detailApiUri = _options.BuildDetailApiUri(idx); + var detailPageUri = _options.BuildDetailPageUri(idx); + + items.Add(new KisaFeedItem(idx, detailApiUri, detailPageUri, published, title, category)); + } + + return items; + } + + private static bool TryExtractIdx(string link, out string idx) + { + idx = string.Empty; + if (string.IsNullOrWhiteSpace(link)) + { + return false; + } + + if (!Uri.TryCreate(link, UriKind.Absolute, out var uri)) + { + return false; + } + + var query = uri.Query?.TrimStart('?'); + if (string.IsNullOrEmpty(query)) + { + return false; + } + + foreach (var pair in query.Split('&', StringSplitOptions.RemoveEmptyEntries)) + { + var separatorIndex = pair.IndexOf('='); + if (separatorIndex <= 0) + { + continue; + } + + var key = pair[..separatorIndex].Trim(); + if (!key.Equals("IDX", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + idx = Uri.UnescapeDataString(pair[(separatorIndex + 1)..]); + return !string.IsNullOrWhiteSpace(idx); + } + + return false; + } + + private static DateTimeOffset ParseDate(string? value) + => DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed) + ? parsed + : DateTimeOffset.UtcNow; +} diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedItem.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedItem.cs new file mode 100644 index 00000000..3cc74e91 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaFeedItem.cs @@ -0,0 +1,11 @@ +using System; + +namespace StellaOps.Feedser.Source.Kisa.Internal; + +public sealed record KisaFeedItem( + string AdvisoryId, + Uri DetailApiUri, + Uri DetailPageUri, + DateTimeOffset Published, + string? Title, + string? 
Category); diff --git a/src/StellaOps.Feedser.Source.Kisa/Internal/KisaMapper.cs b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaMapper.cs new file mode 100644 index 00000000..0fb1592a --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Internal/KisaMapper.cs @@ -0,0 +1,145 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Storage.Mongo.Documents; + +namespace StellaOps.Feedser.Source.Kisa.Internal; + +internal static class KisaMapper +{ + public static Advisory Map(KisaParsedAdvisory dto, DocumentRecord document, DateTimeOffset recordedAt) + { + ArgumentNullException.ThrowIfNull(dto); + ArgumentNullException.ThrowIfNull(document); + + var aliases = BuildAliases(dto); + var references = BuildReferences(dto, recordedAt); + var packages = BuildPackages(dto, recordedAt); + var provenance = new AdvisoryProvenance( + KisaConnectorPlugin.SourceName, + "advisory", + dto.AdvisoryId, + recordedAt, + new[] { ProvenanceFieldMasks.Advisory }); + + return new Advisory( + advisoryKey: dto.AdvisoryId, + title: dto.Title, + summary: dto.Summary, + language: "ko", + published: dto.Published, + modified: dto.Modified, + severity: dto.Severity?.ToLowerInvariant(), + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: packages, + cvssMetrics: Array.Empty(), + provenance: new[] { provenance }); + } + + private static IReadOnlyList BuildAliases(KisaParsedAdvisory dto) + { + var aliases = new List(capacity: dto.CveIds.Count + 1) { dto.AdvisoryId }; + aliases.AddRange(dto.CveIds); + return aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildReferences(KisaParsedAdvisory dto, DateTimeOffset recordedAt) + { + var references = new List + { + new(dto.DetailPageUri.ToString(), "details", "kisa", null, new AdvisoryProvenance( + KisaConnectorPlugin.SourceName, + "reference", + dto.DetailPageUri.ToString(), + recordedAt, + new[] { ProvenanceFieldMasks.References })) + }; + + foreach (var reference in dto.References) + { + if (string.IsNullOrWhiteSpace(reference.Url)) + { + continue; + } + + references.Add(new AdvisoryReference( + reference.Url, + kind: "reference", + sourceTag: "kisa", + summary: reference.Label, + provenance: new AdvisoryProvenance( + KisaConnectorPlugin.SourceName, + "reference", + reference.Url, + recordedAt, + new[] { ProvenanceFieldMasks.References }))); + } + + return references + .DistinctBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase) + .OrderBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildPackages(KisaParsedAdvisory dto, DateTimeOffset recordedAt) + { + if (dto.Products.Count == 0) + { + return Array.Empty(); + } + + var packages = new List(dto.Products.Count); + foreach (var product in dto.Products) + { + var vendor = string.IsNullOrWhiteSpace(product.Vendor) ? "Unknown" : product.Vendor!; + var name = product.Name; + var identifier = string.IsNullOrWhiteSpace(name) ? vendor : $"{vendor} {name}"; + + var provenance = new AdvisoryProvenance( + KisaConnectorPlugin.SourceName, + "package", + identifier, + recordedAt, + new[] { ProvenanceFieldMasks.AffectedPackages }); + + var versionRanges = string.IsNullOrWhiteSpace(product.Versions) + ? 
Array.Empty() + : new[] + { + new AffectedVersionRange( + rangeKind: "string", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: product.Versions, + provenance: new AdvisoryProvenance( + KisaConnectorPlugin.SourceName, + "package-range", + product.Versions, + recordedAt, + new[] { ProvenanceFieldMasks.VersionRanges })) + }; + + packages.Add(new AffectedPackage( + AffectedPackageTypes.Vendor, + identifier, + platform: null, + versionRanges: versionRanges, + statuses: Array.Empty(), + provenance: new[] { provenance }, + normalizedVersions: Array.Empty())); + } + + return packages + .DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/Jobs.cs b/src/StellaOps.Feedser.Source.Kisa/Jobs.cs new file mode 100644 index 00000000..99e2cacc --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/Jobs.cs @@ -0,0 +1,22 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Kisa; + +internal static class KisaJobKinds +{ + public const string Fetch = "source:kisa:fetch"; +} + +internal sealed class KisaFetchJob : IJob +{ + private readonly KisaConnector _connector; + + public KisaFetchJob(KisaConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Kisa/KisaConnector.cs b/src/StellaOps.Feedser.Source.Kisa/KisaConnector.cs new file mode 100644 index 00000000..2cd61d53 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/KisaConnector.cs @@ -0,0 +1,404 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Kisa.Configuration; +using StellaOps.Feedser.Source.Kisa.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Kisa; + +public sealed class KisaConnector : IFeedConnector +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, + }; + + private readonly KisaFeedClient _feedClient; + private readonly KisaDetailParser _detailParser; + private readonly SourceFetchService _fetchService; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly KisaOptions _options; + private readonly KisaDiagnostics _diagnostics; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public KisaConnector( + KisaFeedClient 
feedClient, + KisaDetailParser detailParser, + SourceFetchService fetchService, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + IOptions options, + KisaDiagnostics diagnostics, + TimeProvider? timeProvider, + ILogger logger) + { + _feedClient = feedClient ?? throw new ArgumentNullException(nameof(feedClient)); + _detailParser = detailParser ?? throw new ArgumentNullException(nameof(detailParser)); + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => KisaConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var now = _timeProvider.GetUtcNow(); + _diagnostics.FeedAttempt(); + IReadOnlyList items; + + try + { + items = await _feedClient.LoadAsync(cancellationToken).ConfigureAwait(false); + _diagnostics.FeedSuccess(items.Count); + + if (items.Count > 0) + { + _logger.LogInformation("KISA feed returned {ItemCount} advisories", items.Count); + } + else + { + _logger.LogDebug("KISA feed returned no advisories"); + } + } + catch (Exception ex) + { + _diagnostics.FeedFailure(ex.GetType().Name); + _logger.LogError(ex, "KISA feed fetch failed"); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + if (items.Count == 0) + { + await UpdateCursorAsync(cursor.WithLastFetch(now), cancellationToken).ConfigureAwait(false); + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var knownIds = new HashSet(cursor.KnownIds, StringComparer.OrdinalIgnoreCase); + var processed = 0; + var latestPublished = cursor.LastPublished ?? 
DateTimeOffset.MinValue; + + foreach (var item in items.OrderByDescending(static i => i.Published)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (knownIds.Contains(item.AdvisoryId)) + { + continue; + } + + if (processed >= _options.MaxAdvisoriesPerFetch) + { + break; + } + + var category = item.Category; + _diagnostics.DetailAttempt(category); + + try + { + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, item.DetailApiUri.ToString(), cancellationToken).ConfigureAwait(false); + var request = new SourceFetchRequest(KisaOptions.HttpClientName, SourceName, item.DetailApiUri) + { + Metadata = KisaDocumentMetadata.CreateMetadata(item), + AcceptHeaders = new[] { "application/json", "text/json" }, + ETag = existing?.Etag, + LastModified = existing?.LastModified, + TimeoutOverride = _options.RequestTimeout, + }; + + var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false); + if (result.IsNotModified) + { + _diagnostics.DetailUnchanged(category); + _logger.LogDebug("KISA detail {Idx} unchanged ({Category})", item.AdvisoryId, category ?? "unknown"); + knownIds.Add(item.AdvisoryId); + continue; + } + + if (!result.IsSuccess || result.Document is null) + { + _diagnostics.DetailFailure(category, "empty-document"); + _logger.LogWarning("KISA detail fetch returned no document for {Idx}", item.AdvisoryId); + continue; + } + + pendingDocuments.Add(result.Document.Id); + pendingMappings.Remove(result.Document.Id); + knownIds.Add(item.AdvisoryId); + processed++; + _diagnostics.DetailSuccess(category); + _logger.LogInformation( + "KISA fetched detail for {Idx} (documentId={DocumentId}, category={Category})", + item.AdvisoryId, + result.Document.Id, + category ?? "unknown"); + + if (_options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _diagnostics.DetailFailure(category, ex.GetType().Name); + _logger.LogError(ex, "KISA detail fetch failed for {Idx}", item.AdvisoryId); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + if (item.Published > latestPublished) + { + latestPublished = item.Published; + _diagnostics.CursorAdvanced(); + _logger.LogDebug("KISA advanced published cursor to {Published:O}", latestPublished); + } + } + + var trimmedKnown = knownIds.Count > _options.MaxKnownAdvisories + ? knownIds.OrderByDescending(id => id, StringComparer.OrdinalIgnoreCase) + .Take(_options.MaxKnownAdvisories) + .ToArray() + : knownIds.ToArray(); + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithKnownIds(trimmedKnown) + .WithLastPublished(latestPublished == DateTimeOffset.MinValue ? 
cursor.LastPublished : latestPublished) + .WithLastFetch(now); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("KISA fetch stored {Processed} new documents (knownIds={KnownCount})", processed, trimmedKnown.Length); + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var remainingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var now = _timeProvider.GetUtcNow(); + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + _diagnostics.ParseFailure(null, "document-missing"); + _logger.LogWarning("KISA document {DocumentId} missing during parse", documentId); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + var category = GetCategory(document); + if (!document.GridFsId.HasValue) + { + _diagnostics.ParseFailure(category, "missing-gridfs"); + _logger.LogWarning("KISA document {DocumentId} missing GridFS payload", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + _diagnostics.ParseAttempt(category); + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(category, "download"); + _logger.LogError(ex, "KISA unable to download document {DocumentId}", document.Id); + throw; + } + + KisaParsedAdvisory parsed; + try + { + var apiUri = new Uri(document.Uri); + var pageUri = document.Metadata is not null && document.Metadata.TryGetValue("kisa.detailPage", out var pageValue) + ? new Uri(pageValue) + : apiUri; + parsed = _detailParser.Parse(apiUri, pageUri, payload); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(category, "parse"); + _logger.LogError(ex, "KISA failed to parse detail {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + _diagnostics.ParseSuccess(category); + _logger.LogDebug("KISA parsed detail for {DocumentId} ({Category})", document.Id, category ?? 
"unknown"); + + var dtoBson = BsonDocument.Parse(JsonSerializer.Serialize(parsed, SerializerOptions)); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "kisa.detail.v1", dtoBson, now); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + + remainingDocuments.Remove(documentId); + pendingMappings.Add(document.Id); + } + + var updatedCursor = cursor + .WithPendingDocuments(remainingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToHashSet(); + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + _diagnostics.MapFailure(null, "document-missing"); + _logger.LogWarning("KISA document {DocumentId} missing during map", documentId); + pendingMappings.Remove(documentId); + continue; + } + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + _diagnostics.MapFailure(null, "dto-missing"); + _logger.LogWarning("KISA DTO missing for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + KisaParsedAdvisory? parsed; + try + { + parsed = JsonSerializer.Deserialize(dtoRecord.Payload.ToJson(), SerializerOptions); + } + catch (Exception ex) + { + _diagnostics.MapFailure(null, "dto-deserialize"); + _logger.LogError(ex, "KISA failed to deserialize DTO for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + if (parsed is null) + { + _diagnostics.MapFailure(null, "dto-null"); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + try + { + var advisory = KisaMapper.Map(parsed, document, dtoRecord.ValidatedAt); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapSuccess(parsed.Severity); + _logger.LogInformation("KISA mapped advisory {AdvisoryId} (severity={Severity})", parsed.AdvisoryId, parsed.Severity ?? 
"unknown"); + } + catch (Exception ex) + { + _diagnostics.MapFailure(parsed.Severity, "map"); + _logger.LogError(ex, "KISA mapping failed for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + } + } + + var updatedCursor = cursor.WithPendingMappings(pendingMappings); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + private static string? GetCategory(DocumentRecord document) + { + if (document.Metadata is null) + { + return null; + } + + return document.Metadata.TryGetValue("kisa.category", out var category) + ? category + : null; + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? KisaCursor.Empty : KisaCursor.FromBson(state.Cursor); + } + + private Task UpdateCursorAsync(KisaCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBsonDocument(); + var completedAt = cursor.LastFetchAt ?? _timeProvider.GetUtcNow(); + return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken); + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/KisaConnectorPlugin.cs b/src/StellaOps.Feedser.Source.Kisa/KisaConnectorPlugin.cs new file mode 100644 index 00000000..cd7f8882 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/KisaConnectorPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Kisa; + +public sealed class KisaConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "kisa"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) + => services.GetService() is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return services.GetRequiredService(); + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/KisaDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.Kisa/KisaDependencyInjectionRoutine.cs new file mode 100644 index 00000000..ad805e69 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/KisaDependencyInjectionRoutine.cs @@ -0,0 +1,50 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; +using StellaOps.Feedser.Source.Kisa.Configuration; + +namespace StellaOps.Feedser.Source.Kisa; + +public sealed class KisaDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:kisa"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddKisaConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient(); + + services.PostConfigure(options => + { + EnsureJob(options, KisaJobKinds.Fetch, typeof(KisaFetchJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new 
JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/KisaServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Kisa/KisaServiceCollectionExtensions.cs new file mode 100644 index 00000000..06fc6106 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Kisa/KisaServiceCollectionExtensions.cs @@ -0,0 +1,47 @@ +using System; +using System.Net; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Common.Html; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Kisa.Configuration; +using StellaOps.Feedser.Source.Kisa.Internal; + +namespace StellaOps.Feedser.Source.Kisa; + +public static class KisaServiceCollectionExtensions +{ + public static IServiceCollection AddKisaConnector(this IServiceCollection services, Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + services.AddOptions() + .Configure(configure) + .PostConfigure(static options => options.Validate()); + + services.AddSourceHttpClient(KisaOptions.HttpClientName, static (sp, clientOptions) => + { + var options = sp.GetRequiredService>().Value; + clientOptions.Timeout = options.RequestTimeout; + clientOptions.UserAgent = "StellaOps.Feedser.Kisa/1.0"; + clientOptions.DefaultRequestHeaders["Accept-Language"] = "ko-KR"; + clientOptions.AllowedHosts.Clear(); + clientOptions.AllowedHosts.Add(options.FeedUri.Host); + clientOptions.AllowedHosts.Add(options.DetailApiUri.Host); + clientOptions.ConfigureHandler = handler => + { + handler.AutomaticDecompression = DecompressionMethods.All; + handler.AllowAutoRedirect = true; + }; + }); + + services.TryAddSingleton(); + services.TryAddSingleton(); + services.TryAddSingleton(); + services.AddSingleton(); + services.AddTransient(); + return services; + } +} diff --git a/src/StellaOps.Feedser.Source.Kisa/StellaOps.Feedser.Source.Kisa.csproj b/src/StellaOps.Feedser.Source.Kisa/StellaOps.Feedser.Source.Kisa.csproj index f7f2c154..2f48c62c 100644 --- a/src/StellaOps.Feedser.Source.Kisa/StellaOps.Feedser.Source.Kisa.csproj +++ b/src/StellaOps.Feedser.Source.Kisa/StellaOps.Feedser.Source.Kisa.csproj @@ -9,8 +9,9 @@ - - - - + + + + + diff --git a/src/StellaOps.Feedser.Source.Kisa/TASKS.md b/src/StellaOps.Feedser.Source.Kisa/TASKS.md index 13231a05..bb67ce07 100644 --- a/src/StellaOps.Feedser.Source.Kisa/TASKS.md +++ b/src/StellaOps.Feedser.Source.Kisa/TASKS.md @@ -2,9 +2,9 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-KISA-02-001 Research KISA advisory feeds|BE-Conn-KISA|Research|**DONE (2025-10-11)** – Located public RSS endpoints (`https://knvd.krcert.or.kr/rss/securityInfo.do`, `.../securityNotice.do`) returning UTF-8 XML with 10-item windows and canonical `detailDos.do?IDX=` links. Logged output structure + header profile in `docs/feedser-connector-research-20251011.md`; outstanding work is parsing the SPA detail payload.| -|FEEDCONN-KISA-02-002 Fetch pipeline & source state|BE-Conn-KISA|Source.Common, Storage.Mongo|**TODO** – HTTP client must honour gzip, set `Accept-Language: ko-KR`, and back off to 5 min cadence (server sets no caching headers). 
Persist `pubDate` + `IDX` as cursor, derive SHA of entire XML, and capture feed-level `generator` for diagnostics.| -|FEEDCONN-KISA-02-003 Parser & DTO implementation|BE-Conn-KISA|Source.Common|**TODO** – Detail pages load via SPA; trace AJAX calls (likely `/rssDetail.do` or `/domesticVulDetailData.do`) to capture JSON/HTML fragments. Normalise Hangul text to NFC, split CVE IDs embedded in `description`, and map references. Provide optional translation hook for summary while retaining original Korean text in provenance notes.| -|FEEDCONN-KISA-02-004 Canonical mapping & range primitives|BE-Conn-KISA|Models|**TODO** – Map advisories to canonical records with aliases, references, and vendor/language range primitives. Follow localization guidance while aligning SemVer outputs per `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: expected `NormalizedVersions` example `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"kisa:KNVD-*****"}]`; ensure notes carry Hangul identifiers verbatim for provenance.| -|FEEDCONN-KISA-02-005 Deterministic fixtures & tests|QA|Testing|**TODO** – Add regression tests with Korean-language fixtures; support `UPDATE_KISA_FIXTURES=1`.| -|FEEDCONN-KISA-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics and document connector configuration once implemented.| +|FEEDCONN-KISA-02-002 Fetch pipeline & source state|BE-Conn-KISA|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – `KisaConnector.FetchAsync` pulls RSS, sets `Accept-Language: ko-KR`, persists detail JSON with IDX metadata, throttles requests, and tracks cursor state (pending docs/mappings, known IDs, published timestamp).| +|FEEDCONN-KISA-02-003 Parser & DTO implementation|BE-Conn-KISA|Source.Common|**DONE (2025-10-14)** – Detail API parsed via `KisaDetailParser` (Hangul NFC normalisation, sanitised HTML, CVE extraction, references/products captured into DTO `kisa.detail.v1`).| +|FEEDCONN-KISA-02-004 Canonical mapping & range primitives|BE-Conn-KISA|Models|**DONE (2025-10-14)** – `KisaMapper` emits vendor packages with range strings, aliases (IDX/CVEs), references, and provenance; advisories default to `ko` language and normalised severity.| +|FEEDCONN-KISA-02-005 Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-14)** – Added `StellaOps.Feedser.Source.Kisa.Tests` with Korean fixtures and fetch→parse→map regression; fixtures regenerate via `UPDATE_KISA_FIXTURES=1`.| +|FEEDCONN-KISA-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** – Added diagnostics-backed telemetry, structured logs, regression coverage, and published localisation notes in `docs/dev/kisa_connector_notes.md` + fixture guidance for Docs/QA.| |FEEDCONN-KISA-02-007 RSS contract & localisation brief|BE-Conn-KISA|Research|**DONE (2025-10-11)** – Documented RSS URLs, confirmed UTF-8 payload (no additional cookies required), and drafted localisation plan (Hangul glossary + optional MT plugin). Remaining open item: capture SPA detail API contract for full-text translations.| diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/export-sample.xml b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/export-sample.xml new file mode 100644 index 00000000..fb9e7806 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/export-sample.xml @@ -0,0 +1,118 @@ + + + + BDU:2025-00001 + Множественные уязвимости криптопровайдера + Удалённый злоумышленник может вызвать отказ в обслуживании или получить доступ к данным. + Установить обновление 8.2.19.116 защищённого комплекса. 
+ 01.12.2013 + Высокий уровень опасности (базовая оценка CVSS 2.0 составляет 7,5) + Существует в открытом доступе + Уязвимость устранена + Подтверждена производителем + 0 + + AV:N/AC:L/Au:N/C:P/I:P/A:P + + + AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H + + + + ООО «1С-Софт» + 1С:Предприятие + 8.2.18.96 + Windows + + Прикладное ПО информационных систем + + + + ООО «1С-Софт» + 1С:Предприятие + 8.2.19.116 + Не указана + + Прикладное ПО информационных систем + + + + + + Microsoft Corp + Windows + - + 64-bit + + + Microsoft Corp + Windows + - + 32-bit + + + + + CWE-310 + Проблемы использования криптографии + + + + https://advisories.example/BDU-2025-00001 + http://mirror.example/ru-bdu/BDU-2025-00001 + + + CVE-2015-0206 + CVE-2009-3555 + PT-2015-0206 + + Язык разработки ПО – С + Уязвимость кода + Опубликована + + + BDU:2025-00002 + Уязвимость контроллера АСУ ТП + Локальный злоумышленник может повысить привилегии в контроллере. + Производитель готовит обновление микропрограммы. + 15.10.2024 + Средний уровень опасности + Данные уточняются + Информация об устранении отсутствует + Потенциальная уязвимость + 2 + + AV:L/AC:H/Au:S/C:P/I:P/A:P + + + + АО «Системы Управления» + SCADA Controller + 1.0.0;1.0.1 + - + + ПО программно-аппаратного средства АСУ ТП + + + + + + CWE-269 + Неправильное управление привилегиями + + + CWE-287 + Недостаточная аутентификация + + + + www.vendor.example/security/advisories/ctl-2025-01 + + + ICSA-25-123-01 + + Поставщик сообщает об ограниченном наличии эксплойтов. + Уязвимость архитектуры + Опубликована + + diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json new file mode 100644 index 00000000..58bae6f6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json @@ -0,0 +1,335 @@ +[ + { + "advisoryKey": "BDU:2025-00001", + "affectedPackages": [ + { + "type": "vendor", + "identifier": "ООО «1С-Софт» 1С:Предприятие", + "platform": null, + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": null, + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "ru-bdu", + "kind": "package-range", + "value": "8.2.19.116", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": "8.2.19.116", + "rangeKind": "string" + } + ], + "normalizedVersions": [ + { + "scheme": "ru-bdu.raw", + "type": "exact", + "min": null, + "minInclusive": null, + "max": null, + "maxInclusive": null, + "value": "8.2.19.116", + "notes": null + } + ], + "statuses": [ + { + "provenance": { + "source": "ru-bdu", + "kind": "package-status", + "value": "Подтверждена производителем", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "affected" + }, + { + "provenance": { + "source": "ru-bdu", + "kind": "package-fix-status", + "value": "Уязвимость устранена", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "ru-bdu", + "kind": "package", + "value": "ООО «1С-Софт» 1С:Предприятие", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + } + ] + }, + { + "type": "vendor", + "identifier": "ООО «1С-Софт» 1С:Предприятие", + 
"platform": "Windows", + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": null, + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "ru-bdu", + "kind": "package-range", + "value": "8.2.18.96", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": "8.2.18.96", + "rangeKind": "string" + } + ], + "normalizedVersions": [ + { + "scheme": "ru-bdu.raw", + "type": "exact", + "min": null, + "minInclusive": null, + "max": null, + "maxInclusive": null, + "value": "8.2.18.96", + "notes": null + } + ], + "statuses": [ + { + "provenance": { + "source": "ru-bdu", + "kind": "package-status", + "value": "Подтверждена производителем", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "affected" + }, + { + "provenance": { + "source": "ru-bdu", + "kind": "package-fix-status", + "value": "Уязвимость устранена", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "ru-bdu", + "kind": "package", + "value": "ООО «1С-Софт» 1С:Предприятие", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + } + ] + } + ], + "aliases": [ + "BDU:2025-00001", + "CVE-2009-3555", + "CVE-2015-0206", + "PT-2015-0206" + ], + "credits": [], + "cvssMetrics": [ + { + "baseScore": 7.5, + "baseSeverity": "high", + "provenance": { + "source": "ru-bdu", + "kind": "cvss", + "value": "CVSS:2.0/AV:N/AC:L/AU:N/C:P/I:P/A:P", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:2.0/AV:N/AC:L/AU:N/C:P/I:P/A:P", + "version": "2.0" + }, + { + "baseScore": 9.8, + "baseSeverity": "critical", + "provenance": { + "source": "ru-bdu", + "kind": "cvss", + "value": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "exploitKnown": true, + "language": "ru", + "modified": "2013-01-12T00:00:00+00:00", + "provenance": [ + { + "source": "ru-bdu", + "kind": "advisory", + "value": "BDU:2025-00001", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "advisory" + ] + } + ], + "published": "2013-01-12T00:00:00+00:00", + "references": [ + { + "kind": "source", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "http://mirror.example/ru-bdu/BDU-2025-00001", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "ru-bdu", + "summary": null, + "url": "http://mirror.example/ru-bdu/BDU-2025-00001" + }, + { + "kind": "source", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://advisories.example/BDU-2025-00001", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "ru-bdu", + "summary": null, + "url": "https://advisories.example/BDU-2025-00001" + }, + { + "kind": "details", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://bdu.fstec.ru/vul/2025-00001", + "decisionReason": null, + 
"recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "ru-bdu", + "summary": null, + "url": "https://bdu.fstec.ru/vul/2025-00001" + }, + { + "kind": "cwe", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://cwe.mitre.org/data/definitions/310.html", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "cwe", + "summary": "Проблемы использования криптографии", + "url": "https://cwe.mitre.org/data/definitions/310.html" + }, + { + "kind": "cve", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "cve", + "summary": "CVE-2009-3555", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555" + }, + { + "kind": "cve", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "cve", + "summary": "CVE-2015-0206", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206" + }, + { + "kind": "external", + "provenance": { + "source": "ru-bdu", + "kind": "reference", + "value": "https://ptsecurity.com/PT-2015-0206", + "decisionReason": null, + "recordedAt": "2025-10-14T08:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "positivetechnologiesadvisory", + "summary": "PT-2015-0206", + "url": "https://ptsecurity.com/PT-2015-0206" + } + ], + "severity": "critical", + "summary": "Удалённый злоумышленник может вызвать отказ в обслуживании или получить доступ к данным.", + "title": "Множественные уязвимости криптопровайдера" + } +] \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json new file mode 100644 index 00000000..750704ff --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json @@ -0,0 +1,11 @@ +[ + { + "metadata": { + "ru-bdu.identifier": "BDU:2025-00001", + "ru-bdu.name": "Множественные уязвимости криптопровайдера" + }, + "sha256": "c43df9c4a75a74b281ff09122bb8f63096a0a73b30df74d73c3bc997019bd4d4", + "status": "mapped", + "uri": "https://bdu.fstec.ru/vul/2025-00001" + } +] \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json new file mode 100644 index 00000000..26314147 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json @@ -0,0 +1,86 @@ +[ + { + "documentUri": "https://bdu.fstec.ru/vul/2025-00001", + "payload": { + "identifier": "BDU:2025-00001", + "name": "Множественные уязвимости криптопровайдера", + "description": "Удалённый злоумышленник может вызвать отказ в обслуживании или получить доступ к данным.", + "solution": "Установить обновление 8.2.19.116 защищённого комплекса.", + "identifyDate": "2013-01-12T00:00:00+00:00", + "severityText": "Высокий уровень опасности (базовая оценка CVSS 2.0 составляет 7,5)", + "cvssVector": "AV:N/AC:L/Au:N/C:P/I:P/A:P", + "cvssScore": 7.5, + "cvss3Vector": "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "cvss3Score": 9.8, + 
"exploitStatus": "Существует в открытом доступе", + "incidentCount": 0, + "fixStatus": "Уязвимость устранена", + "vulStatus": "Подтверждена производителем", + "vulClass": "Уязвимость кода", + "vulState": "Опубликована", + "other": "Язык разработки ПО – С", + "software": [ + { + "vendor": "ООО «1С-Софт»", + "name": "1С:Предприятие", + "version": "8.2.18.96", + "platform": "Windows", + "types": [ + "Прикладное ПО информационных систем" + ] + }, + { + "vendor": "ООО «1С-Софт»", + "name": "1С:Предприятие", + "version": "8.2.19.116", + "platform": "Не указана", + "types": [ + "Прикладное ПО информационных систем" + ] + } + ], + "environment": [ + { + "vendor": "Microsoft Corp", + "name": "Windows", + "version": "-", + "platform": "64-bit" + }, + { + "vendor": "Microsoft Corp", + "name": "Windows", + "version": "-", + "platform": "32-bit" + } + ], + "cwes": [ + { + "identifier": "CWE-310", + "name": "Проблемы использования криптографии" + } + ], + "sources": [ + "https://advisories.example/BDU-2025-00001", + "http://mirror.example/ru-bdu/BDU-2025-00001" + ], + "identifiers": [ + { + "type": "CVE", + "value": "CVE-2015-0206", + "link": "https://nvd.nist.gov/vuln/detail/CVE-2015-0206" + }, + { + "type": "CVE", + "value": "CVE-2009-3555", + "link": "https://nvd.nist.gov/vuln/detail/CVE-2009-3555" + }, + { + "type": "Positive Technologies Advisory", + "value": "PT-2015-0206", + "link": "https://ptsecurity.com/PT-2015-0206" + } + ] + }, + "schemaVersion": "ru-bdu.v1" + } +] \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json new file mode 100644 index 00000000..19781784 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json @@ -0,0 +1,11 @@ +[ + { + "headers": { + "accept": "application/zip,application/octet-stream,application/x-zip-compressed", + "accept-Language": "ru-RU,ru; q=0.9,en-US; q=0.6,en; q=0.4", + "user-Agent": "StellaOps/Feedser,(+https://stella-ops.org)" + }, + "method": "GET", + "uri": "https://bdu.fstec.ru/files/documents/vulxml.zip" + } +] \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json new file mode 100644 index 00000000..c5d453f3 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json @@ -0,0 +1,5 @@ +{ + "lastSuccessfulFetch": "2025-10-14T08:00:00.0000000+00:00", + "pendingDocuments": [], + "pendingMappings": [] +} \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs new file mode 100644 index 00000000..618bf863 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs @@ -0,0 +1,303 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using MongoDB.Bson; +using MongoDB.Bson.Serialization; +using MongoDB.Driver; +using StellaOps.Feedser.Models; 
+using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Source.Ru.Bdu; +using StellaOps.Feedser.Source.Ru.Bdu.Configuration; +using StellaOps.Feedser.Source.Ru.Bdu.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Feedser.Testing; +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Feedser.Source.Ru.Bdu.Tests; + +[Collection("mongo-fixture")] +public sealed class RuBduConnectorSnapshotTests : IAsyncLifetime +{ + private const string UpdateFixturesVariable = "UPDATE_BDU_FIXTURES"; + private static readonly Uri ArchiveUri = new("https://bdu.fstec.ru/files/documents/vulxml.zip"); + + private readonly MongoIntegrationFixture _fixture; + private ConnectorTestHarness? _harness; + + public RuBduConnectorSnapshotTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + } + + [Fact] + public async Task FetchParseMap_ProducesDeterministicSnapshots() + { + var harness = await EnsureHarnessAsync(); + harness.Handler.AddResponse(ArchiveUri, BuildArchiveResponse); + + var connector = harness.ServiceProvider.GetRequiredService(); + await connector.FetchAsync(harness.ServiceProvider, CancellationToken.None); + + var stateRepository = harness.ServiceProvider.GetRequiredService(); + var initialState = await stateRepository.TryGetAsync(RuBduConnectorPlugin.SourceName, CancellationToken.None); + Assert.NotNull(initialState); + var cursorBeforeParse = initialState!.Cursor is null ? RuBduCursor.Empty : RuBduCursor.FromBson(initialState.Cursor); + Assert.NotEmpty(cursorBeforeParse.PendingDocuments); + var expectedDocumentIds = cursorBeforeParse.PendingDocuments.ToArray(); + + await connector.ParseAsync(harness.ServiceProvider, CancellationToken.None); + await connector.MapAsync(harness.ServiceProvider, CancellationToken.None); + + var documentsCollection = _fixture.Database.GetCollection(MongoStorageDefaults.Collections.Document); + var documentCount = await documentsCollection.CountDocumentsAsync(Builders.Filter.Empty); + Assert.True(documentCount > 0, "Expected persisted documents after map stage"); + + var documentsSnapshot = await BuildDocumentsSnapshotAsync(harness.ServiceProvider, expectedDocumentIds); + WriteOrAssertSnapshot(documentsSnapshot, "ru-bdu-documents.snapshot.json"); + + var dtoSnapshot = await BuildDtoSnapshotAsync(harness.ServiceProvider); + WriteOrAssertSnapshot(dtoSnapshot, "ru-bdu-dtos.snapshot.json"); + + var advisoriesSnapshot = await BuildAdvisoriesSnapshotAsync(harness.ServiceProvider); + WriteOrAssertSnapshot(advisoriesSnapshot, "ru-bdu-advisories.snapshot.json"); + + var stateSnapshot = await BuildStateSnapshotAsync(harness.ServiceProvider); + WriteOrAssertSnapshot(stateSnapshot, "ru-bdu-state.snapshot.json"); + + var requestsSnapshot = BuildRequestsSnapshot(harness.Handler.Requests); + WriteOrAssertSnapshot(requestsSnapshot, "ru-bdu-requests.snapshot.json"); + + harness.Handler.AssertNoPendingResponses(); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public async Task DisposeAsync() + { + if (_harness is not null) + { + await _harness.DisposeAsync(); + _harness = null; + } + } + + private async Task EnsureHarnessAsync() + { + if (_harness is not null) + { + return _harness; + } + + var initialTime = new DateTimeOffset(2025, 10, 14, 8, 0, 0, TimeSpan.Zero); + var harness = new ConnectorTestHarness(_fixture, initialTime, RuBduOptions.HttpClientName); + await 
harness.EnsureServiceProviderAsync(services => + { + services.AddLogging(builder => + { + builder.ClearProviders(); + builder.AddProvider(NullLoggerProvider.Instance); + }); + + services.AddRuBduConnector(options => + { + options.BaseAddress = new Uri("https://bdu.fstec.ru/"); + options.DataArchivePath = "files/documents/vulxml.zip"; + options.MaxVulnerabilitiesPerFetch = 25; + options.RequestTimeout = TimeSpan.FromSeconds(30); + var cacheRoot = Path.Combine(Path.GetTempPath(), "stellaops-tests", _fixture.Database.DatabaseNamespace.DatabaseName, "ru-bdu"); + Directory.CreateDirectory(cacheRoot); + options.CacheDirectory = cacheRoot; + }); + + services.Configure(RuBduOptions.HttpClientName, options => + { + options.HttpMessageHandlerBuilderActions.Add(builder => builder.PrimaryHandler = harness.Handler); + }); + }); + + _harness = harness; + return harness; + } + + private static HttpResponseMessage BuildArchiveResponse() + { + var archiveBytes = CreateArchiveBytes(); + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(archiveBytes), + }; + response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/zip"); + response.Content.Headers.LastModified = new DateTimeOffset(2025, 10, 14, 9, 30, 0, TimeSpan.Zero); + response.Content.Headers.ContentLength = archiveBytes.Length; + return response; + } + + private async Task BuildDocumentsSnapshotAsync(IServiceProvider provider, IReadOnlyCollection documentIds) + { + var documentStore = provider.GetRequiredService(); + var records = new List(documentIds.Count); + + foreach (var documentId in documentIds) + { + var record = await documentStore.FindAsync(documentId, CancellationToken.None); + if (record is null) + { + var existing = await _fixture.Database + .GetCollection("documents") + .Find(Builders.Filter.Empty) + .Project(Builders.Projection.Include("Uri")) + .ToListAsync(CancellationToken.None); + var uris = existing + .Select(document => document.GetValue("Uri", BsonValue.Create(string.Empty)).AsString) + .ToArray(); + throw new XunitException($"Document id not found: {documentId}. Known URIs: {string.Join(", ", uris)}"); + } + + records.Add(new + { + record.Uri, + record.Status, + record.Sha256, + Metadata = record.Metadata is null + ? 
null + : record.Metadata + .OrderBy(static pair => pair.Key, StringComparer.OrdinalIgnoreCase) + .ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.OrdinalIgnoreCase) + }); + } + + var ordered = records + .OrderBy(static entry => entry?.GetType().GetProperty("Uri")?.GetValue(entry)?.ToString(), StringComparer.Ordinal) + .ToArray(); + + return SnapshotSerializer.ToSnapshot(ordered); + } + + private async Task BuildDtoSnapshotAsync(IServiceProvider provider) + { + var dtoStore = provider.GetRequiredService(); + var documentStore = provider.GetRequiredService(); + var records = await dtoStore.GetBySourceAsync(RuBduConnectorPlugin.SourceName, 25, CancellationToken.None); + + var entries = new List(records.Count); + foreach (var record in records.OrderBy(static r => r.DocumentId)) + { + var document = await documentStore.FindAsync(record.DocumentId, CancellationToken.None); + Assert.NotNull(document); + + var payload = BsonTypeMapper.MapToDotNetValue(record.Payload); + entries.Add(new + { + DocumentUri = document!.Uri, + record.SchemaVersion, + Payload = payload, + }); + } + + return SnapshotSerializer.ToSnapshot(entries.OrderBy(static entry => entry.GetType().GetProperty("DocumentUri")!.GetValue(entry)?.ToString(), StringComparer.Ordinal).ToArray()); + } + + private async Task BuildAdvisoriesSnapshotAsync(IServiceProvider provider) + { + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(25, CancellationToken.None); + var ordered = advisories + .OrderBy(static advisory => advisory.AdvisoryKey, StringComparer.Ordinal) + .ToArray(); + return SnapshotSerializer.ToSnapshot(ordered); + } + + private async Task BuildStateSnapshotAsync(IServiceProvider provider) + { + var stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(RuBduConnectorPlugin.SourceName, CancellationToken.None); + Assert.NotNull(state); + + var cursor = state!.Cursor is null ? 
RuBduCursor.Empty : RuBduCursor.FromBson(state.Cursor); + var snapshot = new + { + PendingDocuments = cursor.PendingDocuments.Select(static guid => guid.ToString()).OrderBy(static id => id, StringComparer.Ordinal).ToArray(), + PendingMappings = cursor.PendingMappings.Select(static guid => guid.ToString()).OrderBy(static id => id, StringComparer.Ordinal).ToArray(), + LastSuccessfulFetch = cursor.LastSuccessfulFetch?.ToUniversalTime().ToString("O"), + }; + + return SnapshotSerializer.ToSnapshot(snapshot); + } + + private static string BuildRequestsSnapshot(IReadOnlyCollection requests) + { + var ordered = requests + .Select(record => new + { + Method = record.Method.Method, + Uri = record.Uri.ToString(), + Headers = record.Headers + .OrderBy(static kvp => kvp.Key, StringComparer.OrdinalIgnoreCase) + .ToDictionary(static kvp => kvp.Key, static kvp => kvp.Value, StringComparer.OrdinalIgnoreCase), + }) + .OrderBy(static entry => entry.Uri, StringComparer.Ordinal) + .ToArray(); + + return SnapshotSerializer.ToSnapshot(ordered); + } + + private static string ReadFixtureText(string filename) + { + var path = GetSourceFixturePath(filename); + return File.ReadAllText(path, Encoding.UTF8); + } + + private static byte[] CreateArchiveBytes() + { + var xml = ReadFixtureText("export-sample.xml"); + using var buffer = new MemoryStream(); + using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true)) + { + var entry = archive.CreateEntry("export/export.xml", CompressionLevel.NoCompression); + entry.LastWriteTime = new DateTimeOffset(2025, 10, 14, 9, 0, 0, TimeSpan.Zero); + using var entryStream = entry.Open(); + using var writer = new StreamWriter(entryStream, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false)); + writer.Write(xml); + } + + return buffer.ToArray(); + } + + private static bool ShouldUpdateFixtures() + => !string.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable(UpdateFixturesVariable)); + + private static void WriteOrAssertSnapshot(string content, string filename) + { + var path = GetSourceFixturePath(filename); + if (ShouldUpdateFixtures()) + { + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + File.WriteAllText(path, content, Encoding.UTF8); + } + else + { + Assert.True(File.Exists(path), $"Snapshot '{filename}' is missing. 
Run {UpdateFixturesVariable}=1 dotnet test to regenerate fixtures."); + var expected = File.ReadAllText(path, Encoding.UTF8); + Assert.Equal(expected, content); + } + } + + private static string GetSourceFixturePath(string relativeName) + => Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "Fixtures", relativeName)); +} diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduMapperTests.cs b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduMapperTests.cs index 8ab0b61d..d10ae342 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduMapperTests.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduMapperTests.cs @@ -19,24 +19,39 @@ public sealed class RuBduMapperTests Description: "Описание", Solution: "Обновить", IdentifyDate: new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero), - SeverityText: "Высокий", + SeverityText: "Высокий уровень опасности", CvssVector: "AV:N/AC:L/Au:N/C:P/I:P/A:P", CvssScore: 7.5, - Cvss3Vector: null, - Cvss3Score: null, + Cvss3Vector: "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + Cvss3Score: 9.8, ExploitStatus: "Существует", - IncidentCount: 1, - FixStatus: "Устранена", - VulStatus: "Подтверждена", + IncidentCount: 2, + FixStatus: "Уязвимость устранена", + VulStatus: "Подтверждена производителем", VulClass: null, VulState: null, Other: null, Software: new[] { - new RuBduSoftwareDto("ООО Вендор", "Продукт", "1.2.3", "Windows", ImmutableArray.Empty) + new RuBduSoftwareDto( + "ООО Вендор", + "Продукт", + "1.2.3;1.2.4", + "Windows", + new[] { "ПО программно-аппаратного средства АСУ ТП" }.ToImmutableArray()) }.ToImmutableArray(), Environment: ImmutableArray.Empty, - Cwes: new[] { new RuBduCweDto("CWE-79", "XSS") }.ToImmutableArray()); + Cwes: new[] { new RuBduCweDto("CWE-79", "XSS"), new RuBduCweDto("CWE-89", "SQL Injection") }.ToImmutableArray(), + Sources: new[] + { + "https://advisories.example/BDU-2025-12345", + "www.example.com/ru-bdu/BDU-2025-12345" + }.ToImmutableArray(), + Identifiers: new[] + { + new RuBduExternalIdentifierDto("CVE", "CVE-2025-12345", "https://nvd.nist.gov/vuln/detail/CVE-2025-12345"), + new RuBduExternalIdentifierDto("Positive Technologies Advisory", "PT-2025-001", "https://ptsecurity.com/PT-2025-001") + }.ToImmutableArray()); var document = new DocumentRecord( Guid.NewGuid(), @@ -56,10 +71,25 @@ public sealed class RuBduMapperTests Assert.Equal("BDU:2025-12345", advisory.AdvisoryKey); Assert.Contains("BDU:2025-12345", advisory.Aliases); - Assert.Equal("high", advisory.Severity); + Assert.Contains("CVE-2025-12345", advisory.Aliases); + Assert.Equal("critical", advisory.Severity); Assert.True(advisory.ExploitKnown); - Assert.Single(advisory.AffectedPackages); - Assert.Single(advisory.CvssMetrics); - Assert.Contains(advisory.References, reference => reference.Url.Contains("bdu.fstec.ru", StringComparison.OrdinalIgnoreCase)); + + var package = Assert.Single(advisory.AffectedPackages); + Assert.Equal(AffectedPackageTypes.IcsVendor, package.Type); + Assert.Equal(2, package.VersionRanges.Length); + Assert.Equal(2, package.NormalizedVersions.Length); + Assert.All(package.NormalizedVersions, rule => Assert.Equal("ru-bdu.raw", rule.Scheme)); + Assert.Contains(package.NormalizedVersions, rule => rule.Value == "1.2.3"); + Assert.Contains(package.NormalizedVersions, rule => rule.Value == "1.2.4"); + Assert.Contains(package.Statuses, status => status.Status == AffectedPackageStatusCatalog.Affected); + Assert.Contains(package.Statuses, status => status.Status == AffectedPackageStatusCatalog.Fixed); + + Assert.Equal(2, 
advisory.CvssMetrics.Length); + Assert.Contains(advisory.References, reference => reference.Url == "https://bdu.fstec.ru/vul/2025-12345" && reference.Kind == "details"); + Assert.Contains(advisory.References, reference => reference.Url == "https://nvd.nist.gov/vuln/detail/CVE-2025-12345" && reference.Kind == "cve"); + Assert.Contains(advisory.References, reference => reference.Url == "https://advisories.example/BDU-2025-12345" && reference.Kind == "source"); + Assert.Contains(advisory.References, reference => reference.Url == "https://www.example.com/ru-bdu/BDU-2025-12345" && reference.Kind == "source"); + Assert.Contains(advisory.References, reference => reference.SourceTag == "positivetechnologiesadvisory"); } } diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduXmlParserTests.cs b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduXmlParserTests.cs index 8f71cd60..903ef628 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduXmlParserTests.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu.Tests/RuBduXmlParserTests.cs @@ -1,3 +1,4 @@ +using System.IO; using System.Xml.Linq; using StellaOps.Feedser.Source.Ru.Bdu.Internal; using Xunit; @@ -22,8 +23,11 @@ public sealed class RuBduXmlParserTests Подтверждена производителем 1 - AV:N/AC:L/Au:N/C:P/I:P/A:P + AV:N/AC:L/Au:N/C:P/I:P/A:P + + AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H + ООО «Вендор» @@ -35,6 +39,14 @@ public sealed class RuBduXmlParserTests + + https://advisories.example/BDU-2025-12345 + https://mirror.example/ru-bdu/BDU-2025-12345 + + + CVE-2025-12345 + GHSA-xxxx-yyyy-zzzz + CWE-79 @@ -52,7 +64,30 @@ public sealed class RuBduXmlParserTests Assert.Equal("Уязвимость тестового продукта", dto.Name); Assert.Equal("AV:N/AC:L/Au:N/C:P/I:P/A:P", dto.CvssVector); Assert.Equal(7.5, dto.CvssScore); + Assert.Equal("AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", dto.Cvss3Vector); + Assert.Equal(9.8, dto.Cvss3Score); Assert.Single(dto.Software); Assert.Single(dto.Cwes); + Assert.Equal(2, dto.Sources.Length); + Assert.Contains("https://advisories.example/BDU-2025-12345", dto.Sources); + Assert.Equal(2, dto.Identifiers.Length); + Assert.Contains(dto.Identifiers, identifier => identifier.Type == "CVE" && identifier.Value == "CVE-2025-12345"); + Assert.Contains(dto.Identifiers, identifier => identifier.Type == "GHSA" && identifier.Link == "https://github.com/advisories/GHSA-xxxx-yyyy-zzzz"); + } + + [Fact] + public void TryParse_SampleArchiveEntries_ReturnDtos() + { + var path = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "Fixtures", "export-sample.xml")); + var document = XDocument.Load(path); + var vulnerabilities = document.Root?.Elements("vul"); + Assert.NotNull(vulnerabilities); + + foreach (var element in vulnerabilities!) + { + var dto = RuBduXmlParser.TryParse(element); + Assert.NotNull(dto); + Assert.False(string.IsNullOrWhiteSpace(dto!.Identifier)); + } } } diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduDiagnostics.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduDiagnostics.cs new file mode 100644 index 00000000..c39d8963 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduDiagnostics.cs @@ -0,0 +1,144 @@ +using System; +using System.Diagnostics.Metrics; +using StellaOps.Feedser.Models; + +namespace StellaOps.Feedser.Source.Ru.Bdu.Internal; + +/// +/// Emits RU-BDU specific OpenTelemetry metrics for fetch/parse/map stages. 
+/// +public sealed class RuBduDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.Ru.Bdu"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + + private readonly Counter _fetchAttempts; + private readonly Counter _fetchSuccess; + private readonly Counter _fetchFailures; + private readonly Counter _fetchUnchanged; + private readonly Counter _fetchCacheFallbacks; + private readonly Histogram _fetchDocumentAdds; + + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Histogram _parseSoftwareCount; + private readonly Histogram _parseIdentifierCount; + private readonly Histogram _parseSourceCount; + + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + private readonly Histogram _mapPackageCount; + private readonly Histogram _mapAliasCount; + + public RuBduDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + + _fetchAttempts = _meter.CreateCounter( + name: "ru.bdu.fetch.attempts", + unit: "operations", + description: "Number of RU-BDU archive fetch attempts."); + _fetchSuccess = _meter.CreateCounter( + name: "ru.bdu.fetch.success", + unit: "operations", + description: "Number of RU-BDU archive fetches processed successfully."); + _fetchFailures = _meter.CreateCounter( + name: "ru.bdu.fetch.failures", + unit: "operations", + description: "Number of RU-BDU archive fetches that failed."); + _fetchUnchanged = _meter.CreateCounter( + name: "ru.bdu.fetch.not_modified", + unit: "operations", + description: "Number of RU-BDU archive fetches returning HTTP 304."); + _fetchCacheFallbacks = _meter.CreateCounter( + name: "ru.bdu.fetch.cache_fallbacks", + unit: "operations", + description: "Number of RU-BDU fetches that fell back to the cached archive."); + _fetchDocumentAdds = _meter.CreateHistogram( + name: "ru.bdu.fetch.documents", + unit: "documents", + description: "Distribution of new documents written per RU-BDU fetch."); + + _parseSuccess = _meter.CreateCounter( + name: "ru.bdu.parse.success", + unit: "documents", + description: "Number of RU-BDU documents parsed into DTOs."); + _parseFailures = _meter.CreateCounter( + name: "ru.bdu.parse.failures", + unit: "documents", + description: "Number of RU-BDU documents that failed parsing."); + _parseSoftwareCount = _meter.CreateHistogram( + name: "ru.bdu.parse.software.count", + unit: "entries", + description: "Distribution of vulnerable software entries per RU-BDU DTO."); + _parseIdentifierCount = _meter.CreateHistogram( + name: "ru.bdu.parse.identifiers.count", + unit: "entries", + description: "Distribution of external identifiers per RU-BDU DTO."); + _parseSourceCount = _meter.CreateHistogram( + name: "ru.bdu.parse.sources.count", + unit: "entries", + description: "Distribution of source references per RU-BDU DTO."); + + _mapSuccess = _meter.CreateCounter( + name: "ru.bdu.map.success", + unit: "advisories", + description: "Number of canonical advisories emitted by the RU-BDU mapper."); + _mapFailures = _meter.CreateCounter( + name: "ru.bdu.map.failures", + unit: "advisories", + description: "Number of RU-BDU advisory mapping attempts that failed."); + _mapPackageCount = _meter.CreateHistogram( + name: "ru.bdu.map.packages.count", + unit: "packages", + description: "Distribution of affected packages per RU-BDU advisory."); + _mapAliasCount = _meter.CreateHistogram( + name: "ru.bdu.map.aliases.count", + unit: "aliases", + description: "Distribution of aliases per RU-BDU advisory."); + } + + 
public void FetchAttempt() => _fetchAttempts.Add(1); + + public void FetchUnchanged() => _fetchUnchanged.Add(1); + + public void FetchCacheFallback() => _fetchCacheFallbacks.Add(1); + + public void FetchFailure() => _fetchFailures.Add(1); + + public void FetchSuccess(int addedCount, bool usedCache) + { + _ = usedCache; + _fetchSuccess.Add(1); + if (addedCount > 0) + { + _fetchDocumentAdds.Record(addedCount); + } + } + + public void ParseSuccess(int softwareCount, int identifierCount, int sourceCount) + { + _parseSuccess.Add(1); + _parseSoftwareCount.Record(Math.Max(softwareCount, 0)); + _parseIdentifierCount.Record(Math.Max(identifierCount, 0)); + _parseSourceCount.Record(Math.Max(sourceCount, 0)); + } + + public void ParseFailure() => _parseFailures.Add(1); + + public void MapSuccess(Advisory advisory) + { + _mapSuccess.Add(1); + _mapPackageCount.Record(advisory.AffectedPackages.Length); + _mapAliasCount.Record(advisory.Aliases.Length); + } + + public void MapFailure() => _mapFailures.Add(1); + + public void Dispose() + { + _meter.Dispose(); + } +} diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduMapper.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduMapper.cs index 70acefaf..eacb61c8 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduMapper.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduMapper.cs @@ -1,7 +1,9 @@ +using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Globalization; using System.Linq; +using System.Text; using StellaOps.Feedser.Models; using StellaOps.Feedser.Normalization.Cvss; using StellaOps.Feedser.Storage.Mongo.Documents; @@ -10,6 +12,8 @@ namespace StellaOps.Feedser.Source.Ru.Bdu.Internal; internal static class RuBduMapper { + private const string RawVersionScheme = "ru-bdu.raw"; + public static Advisory Map(RuBduVulnerabilityDto dto, DocumentRecord document, DateTimeOffset recordedAt) { ArgumentNullException.ThrowIfNull(dto); @@ -26,7 +30,7 @@ internal static class RuBduMapper var packages = BuildPackages(dto, recordedAt); var references = BuildReferences(dto, document, recordedAt); var cvssMetrics = BuildCvssMetrics(dto, recordedAt, out var severityFromCvss); - var severity = severityFromCvss; + var severity = severityFromCvss ?? NormalizeSeverity(dto.SeverityText); var exploitKnown = DetermineExploitKnown(dto); return new Advisory( @@ -47,8 +51,24 @@ internal static class RuBduMapper private static IReadOnlyList BuildAliases(RuBduVulnerabilityDto dto) { - var aliases = new List(capacity: 2) { dto.Identifier }; - return aliases; + var aliases = new HashSet(StringComparer.Ordinal); + + if (!string.IsNullOrWhiteSpace(dto.Identifier)) + { + aliases.Add(dto.Identifier.Trim()); + } + + foreach (var identifier in dto.Identifiers) + { + if (string.IsNullOrWhiteSpace(identifier.Value)) + { + continue; + } + + aliases.Add(identifier.Value.Trim()); + } + + return aliases.Count == 0 ? Array.Empty() : aliases.ToArray(); } private static IReadOnlyList BuildPackages(RuBduVulnerabilityDto dto, DateTimeOffset recordedAt) @@ -66,19 +86,7 @@ internal static class RuBduMapper continue; } - var identifier = string.Join( - " ", - new[] { software.Vendor, software.Name } - .Where(static part => !string.IsNullOrWhiteSpace(part)) - .Select(static part => part!.Trim())); - - if (string.IsNullOrWhiteSpace(identifier)) - { - identifier = software.Name ?? software.Vendor ?? 
dto.Identifier; - } - - var isIcs = !software.Types.IsDefaultOrEmpty && software.Types.Any(static type => string.Equals(type, "ics", StringComparison.OrdinalIgnoreCase)); - + var identifier = BuildPackageIdentifier(dto.Identifier, software); var packageProvenance = new AdvisoryProvenance( RuBduConnectorPlugin.SourceName, "package", @@ -86,62 +94,327 @@ internal static class RuBduMapper recordedAt, new[] { ProvenanceFieldMasks.AffectedPackages }); - var normalizedStatus = NormalizeStatus(dto.VulStatus); - var statuses = normalizedStatus is null - ? Array.Empty() - : new[] - { - new AffectedPackageStatus(normalizedStatus, new AdvisoryProvenance( - RuBduConnectorPlugin.SourceName, - "package-status", - dto.VulStatus ?? normalizedStatus, - recordedAt, - new[] { ProvenanceFieldMasks.PackageStatuses })) - }; - - var ranges = Array.Empty(); - if (!string.IsNullOrWhiteSpace(software.Version)) - { - ranges = new[] - { - new AffectedVersionRange( - rangeKind: "string", - introducedVersion: null, - fixedVersion: null, - lastAffectedVersion: null, - rangeExpression: software.Version, - provenance: new AdvisoryProvenance( - RuBduConnectorPlugin.SourceName, - "package-range", - software.Version, - recordedAt, - new[] { ProvenanceFieldMasks.VersionRanges })) - }; - } + var statuses = BuildPackageStatuses(dto, recordedAt); + var ranges = BuildVersionRanges(software, recordedAt); + var normalizedVersions = BuildNormalizedVersions(software); packages.Add(new AffectedPackage( - isIcs ? AffectedPackageTypes.IcsVendor : AffectedPackageTypes.Vendor, + DeterminePackageType(software.Types), identifier, - platform: software.Platform, + platform: NormalizePlatform(software.Platform), versionRanges: ranges, statuses: statuses, - provenance: new[] { packageProvenance })); + provenance: new[] { packageProvenance }, + normalizedVersions: normalizedVersions)); } return packages; } + private static string BuildPackageIdentifier(string fallbackIdentifier, RuBduSoftwareDto software) + { + var parts = new[] { software.Vendor, software.Name } + .Where(static part => !string.IsNullOrWhiteSpace(part)) + .Select(static part => part!.Trim()) + .ToArray(); + + if (parts.Length == 0) + { + return software.Name ?? software.Vendor ?? fallbackIdentifier; + } + + return string.Join(" ", parts); + } + + private static IReadOnlyList BuildPackageStatuses(RuBduVulnerabilityDto dto, DateTimeOffset recordedAt) + { + var statuses = new List(capacity: 2); + + if (TryNormalizeVulnerabilityStatus(dto.VulStatus, out var vulnerabilityStatus)) + { + statuses.Add(new AffectedPackageStatus( + vulnerabilityStatus!, + new AdvisoryProvenance( + RuBduConnectorPlugin.SourceName, + "package-status", + dto.VulStatus!, + recordedAt, + new[] { ProvenanceFieldMasks.PackageStatuses }))); + } + + if (TryNormalizeFixStatus(dto.FixStatus, out var fixStatus)) + { + statuses.Add(new AffectedPackageStatus( + fixStatus!, + new AdvisoryProvenance( + RuBduConnectorPlugin.SourceName, + "package-fix-status", + dto.FixStatus!, + recordedAt, + new[] { ProvenanceFieldMasks.PackageStatuses }))); + } + + return statuses.Count == 0 ? Array.Empty() : statuses; + } + + private static bool TryNormalizeVulnerabilityStatus(string? status, out string? 
normalized) + { + normalized = null; + if (string.IsNullOrWhiteSpace(status)) + { + return false; + } + + var token = status.Trim().ToLowerInvariant(); + if (token.Contains("потенциал", StringComparison.Ordinal)) + { + normalized = AffectedPackageStatusCatalog.UnderInvestigation; + return true; + } + + if (token.Contains("подтвержд", StringComparison.Ordinal)) + { + normalized = AffectedPackageStatusCatalog.Affected; + return true; + } + + if (token.Contains("актуал", StringComparison.Ordinal)) + { + normalized = AffectedPackageStatusCatalog.Affected; + return true; + } + + return false; + } + + private static bool TryNormalizeFixStatus(string? status, out string? normalized) + { + normalized = null; + if (string.IsNullOrWhiteSpace(status)) + { + return false; + } + + var token = status.Trim().ToLowerInvariant(); + if (token.Contains("устранена", StringComparison.Ordinal)) + { + normalized = AffectedPackageStatusCatalog.Fixed; + return true; + } + + if (token.Contains("информация об устранении отсутствует", StringComparison.Ordinal)) + { + normalized = AffectedPackageStatusCatalog.Unknown; + return true; + } + + return false; + } + + private static IReadOnlyList BuildVersionRanges(RuBduSoftwareDto software, DateTimeOffset recordedAt) + { + var tokens = SplitVersionTokens(software.Version).ToArray(); + if (tokens.Length == 0) + { + return Array.Empty(); + } + + var ranges = new List(tokens.Length); + foreach (var token in tokens) + { + ranges.Add(new AffectedVersionRange( + rangeKind: "string", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: token, + provenance: new AdvisoryProvenance( + RuBduConnectorPlugin.SourceName, + "package-range", + token, + recordedAt, + new[] { ProvenanceFieldMasks.VersionRanges }))); + } + + return ranges; + } + + private static IReadOnlyList BuildNormalizedVersions(RuBduSoftwareDto software) + { + var tokens = SplitVersionTokens(software.Version).ToArray(); + if (tokens.Length == 0) + { + return Array.Empty(); + } + + var rules = new List(tokens.Length); + foreach (var token in tokens) + { + rules.Add(new NormalizedVersionRule( + RawVersionScheme, + NormalizedVersionRuleTypes.Exact, + value: token)); + } + + return rules; + } + + private static IEnumerable SplitVersionTokens(string? version) + { + if (string.IsNullOrWhiteSpace(version)) + { + yield break; + } + + var raw = version.Trim(); + if (raw.Length == 0 || string.Equals(raw, "-", StringComparison.Ordinal)) + { + yield break; + } + + var tokens = raw.Split(VersionSeparators, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (tokens.Length == 0) + { + yield return raw; + yield break; + } + + foreach (var token in tokens) + { + if (string.IsNullOrWhiteSpace(token) || string.Equals(token, "-", StringComparison.Ordinal)) + { + continue; + } + + if (token.Equals("не указано", StringComparison.OrdinalIgnoreCase) + || token.Equals("не указана", StringComparison.OrdinalIgnoreCase) + || token.Equals("не определено", StringComparison.OrdinalIgnoreCase) + || token.Equals("не определена", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + yield return token; + } + } + + private static string? NormalizePlatform(string? 
platform) + { + if (string.IsNullOrWhiteSpace(platform)) + { + return null; + } + + var trimmed = platform.Trim(); + if (trimmed.Length == 0) + { + return null; + } + + if (trimmed.Equals("-", StringComparison.Ordinal) + || trimmed.Equals("не указана", StringComparison.OrdinalIgnoreCase) + || trimmed.Equals("не указано", StringComparison.OrdinalIgnoreCase)) + { + return null; + } + + return trimmed; + } + + private static string DeterminePackageType(ImmutableArray types) + => IsIcsSoftware(types) ? AffectedPackageTypes.IcsVendor : AffectedPackageTypes.Vendor; + + private static bool IsIcsSoftware(ImmutableArray types) + { + if (types.IsDefaultOrEmpty) + { + return false; + } + + foreach (var type in types) + { + if (string.IsNullOrWhiteSpace(type)) + { + continue; + } + + var token = type.Trim(); + if (token.Contains("АСУ", StringComparison.OrdinalIgnoreCase) + || token.Contains("SCADA", StringComparison.OrdinalIgnoreCase) + || token.Contains("ICS", StringComparison.OrdinalIgnoreCase) + || token.Contains("промыш", StringComparison.OrdinalIgnoreCase) + || token.Contains("industrial", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + private static IReadOnlyList BuildReferences(RuBduVulnerabilityDto dto, DocumentRecord document, DateTimeOffset recordedAt) { - var references = new List + var references = new List(); + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + + void AddReference(string? url, string kind, string sourceTag, string? summary = null) { - new(document.Uri, "details", "ru-bdu", summary: null, new AdvisoryProvenance( - RuBduConnectorPlugin.SourceName, - "reference", - document.Uri, - recordedAt, - new[] { ProvenanceFieldMasks.References })) - }; + if (string.IsNullOrWhiteSpace(url)) + { + return; + } + + var trimmed = url.Trim(); + if (!Uri.TryCreate(trimmed, UriKind.Absolute, out var uri)) + { + if (trimmed.StartsWith("www.", StringComparison.OrdinalIgnoreCase) + && Uri.TryCreate($"https://{trimmed}", UriKind.Absolute, out var prefixed)) + { + uri = prefixed; + } + else + { + return; + } + } + + var canonical = uri.ToString(); + if (!seen.Add(canonical)) + { + return; + } + + references.Add(new AdvisoryReference( + canonical, + kind, + sourceTag, + summary, + new AdvisoryProvenance( + RuBduConnectorPlugin.SourceName, + "reference", + canonical, + recordedAt, + new[] { ProvenanceFieldMasks.References }))); + } + + AddReference(document.Uri, "details", RuBduConnectorPlugin.SourceName); + + foreach (var source in dto.Sources) + { + AddReference(source, "source", RuBduConnectorPlugin.SourceName); + } + + foreach (var identifier in dto.Identifiers) + { + if (string.IsNullOrWhiteSpace(identifier.Link)) + { + continue; + } + + var sourceTag = NormalizeIdentifierType(identifier.Type); + var kind = string.Equals(sourceTag, "cve", StringComparison.Ordinal) ? "cve" : "external"; + AddReference(identifier.Link, kind, sourceTag, identifier.Value); + } foreach (var cwe in dto.Cwes) { @@ -157,17 +430,35 @@ internal static class RuBduMapper } var url = $"https://cwe.mitre.org/data/definitions/{slug}.html"; - references.Add(new AdvisoryReference(url, "cwe", "cwe", cwe.Name, new AdvisoryProvenance( - RuBduConnectorPlugin.SourceName, - "reference", - url, - recordedAt, - new[] { ProvenanceFieldMasks.References }))); + AddReference(url, "cwe", "cwe", cwe.Name); } return references; } + private static string NormalizeIdentifierType(string? 
type) + { + if (string.IsNullOrWhiteSpace(type)) + { + return RuBduConnectorPlugin.SourceName; + } + + var builder = new StringBuilder(type.Length); + foreach (var ch in type) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + else if (ch is '-' or '_' or '.') + { + builder.Append(ch); + } + } + + return builder.Length == 0 ? RuBduConnectorPlugin.SourceName : builder.ToString(); + } + private static IReadOnlyList BuildCvssMetrics(RuBduVulnerabilityDto dto, DateTimeOffset recordedAt, out string? severity) { severity = null; @@ -183,7 +474,6 @@ internal static class RuBduMapper new[] { ProvenanceFieldMasks.CvssMetrics }); var metric = normalized.ToModel(provenance); metrics.Add(metric); - severity ??= metric.BaseSeverity; } if (!string.IsNullOrWhiteSpace(dto.Cvss3Vector) && CvssMetricNormalizer.TryNormalize("3.1", dto.Cvss3Vector, dto.Cvss3Score, null, out var normalized3)) @@ -196,7 +486,6 @@ internal static class RuBduMapper new[] { ProvenanceFieldMasks.CvssMetrics }); var metric = normalized3.ToModel(provenance); metrics.Add(metric); - severity ??= metric.BaseSeverity; } if (metrics.Count > 1) @@ -207,26 +496,40 @@ internal static class RuBduMapper .ToList(); } + severity = metrics.Count > 0 ? metrics[0].BaseSeverity : severity; return metrics; } - private static string NormalizeStatus(string? status) + + private static string? NormalizeSeverity(string? severityText) { - if (string.IsNullOrWhiteSpace(status)) + if (string.IsNullOrWhiteSpace(severityText)) { return null; } - var normalized = status.Trim().ToLowerInvariant(); - return normalized switch + var token = severityText.Trim().ToLowerInvariant(); + if (token.Contains("критич", StringComparison.Ordinal)) { - "устранена" or "устранена производителем" or "устранена разработчиком" => AffectedPackageStatusCatalog.Fixed, - "устраняется" or "устранение планируется" or "разрабатывается" => AffectedPackageStatusCatalog.Pending, - "не устранена" => AffectedPackageStatusCatalog.Pending, - "актуальна" or "подтверждена" or "подтверждена производителем" or "подтверждена исследователями" => AffectedPackageStatusCatalog.Affected, - _ => null, - }; - } + return "critical"; + } + if (token.Contains("высок", StringComparison.Ordinal)) + { + return "high"; + } + + if (token.Contains("средн", StringComparison.Ordinal) || token.Contains("умер", StringComparison.Ordinal)) + { + return "medium"; + } + + if (token.Contains("низк", StringComparison.Ordinal)) + { + return "low"; + } + + return null; + } private static bool DetermineExploitKnown(RuBduVulnerabilityDto dto) { @@ -246,4 +549,6 @@ internal static class RuBduMapper return false; } + + private static readonly char[] VersionSeparators = { ',', ';', '\r', '\n', '\t' }; } diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs index 62f54b09..104ab1c4 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs @@ -23,7 +23,9 @@ internal sealed record RuBduVulnerabilityDto( string? Other, ImmutableArray Software, ImmutableArray Environment, - ImmutableArray Cwes) + ImmutableArray Cwes, + ImmutableArray Sources, + ImmutableArray Identifiers) { [JsonIgnore] public bool HasCvss => !string.IsNullOrWhiteSpace(CvssVector) || !string.IsNullOrWhiteSpace(Cvss3Vector); @@ -43,3 +45,8 @@ internal sealed record RuBduEnvironmentDto( string? 
Platform); internal sealed record RuBduCweDto(string Identifier, string? Name); + +internal sealed record RuBduExternalIdentifierDto( + string Type, + string Value, + string? Link); diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduXmlParser.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduXmlParser.cs index 522a2aca..424b1185 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduXmlParser.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/Internal/RuBduXmlParser.cs @@ -1,3 +1,4 @@ +using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using System.Globalization; @@ -39,9 +40,23 @@ internal static class RuBduXmlParser var cvss3Vector = Normalize(cvss3VectorElement?.Value); var cvss3Score = ParseDouble(cvss3VectorElement?.Attribute("score")?.Value); + if (string.IsNullOrWhiteSpace(cvssVector)) + { + cvssVector = null; + cvssScore = null; + } + + if (string.IsNullOrWhiteSpace(cvss3Vector)) + { + cvss3Vector = null; + cvss3Score = null; + } + var software = ParseSoftware(element.Element("vulnerable_software")); var environment = ParseEnvironment(element.Element("environment")); var cwes = ParseCwes(element.Element("cwes")); + var sources = ParseSources(element.Element("sources")); + var identifiers = ParseIdentifiers(element.Element("identifiers")); return new RuBduVulnerabilityDto( identifier.Trim(), @@ -63,7 +78,9 @@ internal static class RuBduXmlParser other, software, environment, - cwes); + cwes, + sources, + identifiers); } private static ImmutableArray ParseSoftware(XElement? root) @@ -133,6 +150,61 @@ internal static class RuBduXmlParser return builder.ToImmutable(); } + private static ImmutableArray ParseSources(XElement? root) + { + if (root is null) + { + return ImmutableArray.Empty; + } + + var raw = root.Value; + if (string.IsNullOrWhiteSpace(raw)) + { + return ImmutableArray.Empty; + } + + var tokens = raw + .Split(SourceSeparators, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Select(static token => token.Trim()) + .Where(static token => !string.IsNullOrWhiteSpace(token)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return tokens.IsDefaultOrEmpty ? ImmutableArray.Empty : tokens; + } + + private static ImmutableArray ParseIdentifiers(XElement? root) + { + if (root is null) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + foreach (var identifier in root.Elements("identifier")) + { + var value = Normalize(identifier?.Value); + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + var type = identifier?.Attribute("type")?.Value?.Trim(); + var link = identifier?.Attribute("link")?.Value?.Trim(); + + if (string.IsNullOrWhiteSpace(type)) + { + type = "external"; + } + + builder.Add(new RuBduExternalIdentifierDto(type, value.Trim(), string.IsNullOrWhiteSpace(link) ? null : link)); + } + + return builder.ToImmutable(); + } + + private static readonly char[] SourceSeparators = { '\r', '\n', '\t', ' ' }; + private static DateTimeOffset? ParseDate(string? 
value) { if (string.IsNullOrWhiteSpace(value)) diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/README.md b/src/StellaOps.Feedser.Source.Ru.Bdu/README.md new file mode 100644 index 00000000..2edf64b6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/README.md @@ -0,0 +1,40 @@ +# RU BDU Connector Notes + +## Data source & access requirements + +- **Primary feed**: `https://bdu.fstec.ru/files/documents/vulxml.zip` exposes the full vulnerability catalogue as a zipped XML tree (“`export/export.xml`”). FSTEC refreshes the archive several times per week; incremental diffs are not published, so every run downloads the full bundle. +- **TLS trust**: the endpoint presents certificates chained to the Russian Trusted Root/Sub CAs. Bundle the official PEMs inside the deployment (`certificates/russian_trusted_root_ca.pem`, `certificates/russian_trusted_sub_ca.pem`, or the combined `certificates/russian_trusted_bundle.pem`) and point the connector at them, e.g.: + + ```yaml + feedser: + httpClients: + source.bdu: + trustedRootPaths: + - certificates/russian_trusted_bundle.pem + allowInvalidCertificates: false + timeout: 00:02:00 + ``` + +- **Offline Kit**: copy the PEM bundle above into the Offline Kit artefacts and set `feedser:offline:root` (or `FEEDSER_OFFLINE_ROOT`) so air‑gapped installs can resolve relative certificate paths. Package the most recent `vulxml.zip` alongside cached exports when preparing air-gap refreshes. + +The connector keeps a local cache (`cache/ru-bdu/vulxml.zip`) so transient fetch failures can fall back to the last successful archive without blocking the cursor. + +## Telemetry + +The connector publishes an OpenTelemetry meter named `StellaOps.Feedser.Source.Ru.Bdu`. Instruments include: + +- `ru.bdu.fetch.*` – `attempts`, `success`, `failures`, `not_modified`, `cache_fallbacks`, and histogram `ru.bdu.fetch.documents`. +- `ru.bdu.parse.*` – counters for success/failures plus histograms tracking vulnerable software, external identifiers, and source reference counts per DTO. +- `ru.bdu.map.*` – counters for success/failures with histograms covering affected package counts and alias fan-out per advisory. + +Use these metrics to alert on repeated cache fallbacks, sustained parse failures, or unexpected advisory fan-out. + +## Regression fixtures + +Deterministic fixtures live under `src/StellaOps.Feedser.Source.Ru.Bdu.Tests/Fixtures`. Run + +```bash +dotnet test src/StellaOps.Feedser.Source.Ru.Bdu.Tests +``` + +to execute the RU BDU snapshot suite, and set `UPDATE_BDU_FIXTURES=1` to refresh stored snapshots when ingest logic changes. The harness records the fetch requests, documents, DTOs, advisories, and state cursor to guarantee reproducible pipelines across machines. 
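
As an aside on consuming those instruments: the sketch below is a minimal, illustrative in-process listener built on the stock `System.Diagnostics.Metrics.MeterListener` API. It assumes nothing about the connector beyond the `StellaOps.Feedser.Source.Ru.Bdu` meter name and the instrument names listed in the telemetry section above; the console sink, the `RuBduMetricsProbe` helper name, and the dual long/int callbacks are assumptions made for the sketch, not part of the connector.

```csharp
// Minimal sketch (not part of the connector): observe RU-BDU metrics in-process.
// Only the meter name and instrument names come from the connector; the console
// sink and the choice of callback types are assumptions for illustration.
using System;
using System.Diagnostics.Metrics;

static class RuBduMetricsProbe
{
    public static MeterListener Start()
    {
        var listener = new MeterListener();

        listener.InstrumentPublished = (instrument, l) =>
        {
            // Subscribe only to instruments published by the RU-BDU connector meter.
            if (instrument.Meter.Name == "StellaOps.Feedser.Source.Ru.Bdu")
            {
                l.EnableMeasurementEvents(instrument);
            }
        };

        // The diagnostics type's generic arguments are not visible in this diff,
        // so register callbacks for both long and int measurements to be safe.
        listener.SetMeasurementEventCallback<long>(
            (instrument, measurement, tags, state) =>
                Console.WriteLine($"{instrument.Name} += {measurement}"));
        listener.SetMeasurementEventCallback<int>(
            (instrument, measurement, tags, state) =>
                Console.WriteLine($"{instrument.Name} recorded {measurement}"));

        listener.Start();
        return listener; // Dispose the listener to stop receiving measurements.
    }
}
```

In a real deployment the meter would more likely be registered with the OpenTelemetry SDK (for example through a meter provider that adds `StellaOps.Feedser.Source.Ru.Bdu` as a source) rather than an ad-hoc listener; the listener form is shown only because it needs nothing beyond the base class library.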
diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduConnector.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduConnector.cs index e2aa465b..ba709d13 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduConnector.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduConnector.cs @@ -40,6 +40,7 @@ public sealed class RuBduConnector : IFeedConnector private readonly IAdvisoryStore _advisoryStore; private readonly ISourceStateRepository _stateRepository; private readonly RuBduOptions _options; + private readonly RuBduDiagnostics _diagnostics; private readonly TimeProvider _timeProvider; private readonly ILogger _logger; @@ -54,6 +55,7 @@ public sealed class RuBduConnector : IFeedConnector IAdvisoryStore advisoryStore, ISourceStateRepository stateRepository, IOptions options, + RuBduDiagnostics diagnostics, TimeProvider? timeProvider, ILogger logger) { @@ -65,6 +67,7 @@ public sealed class RuBduConnector : IFeedConnector _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); _options.Validate(); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); _timeProvider = timeProvider ?? TimeProvider.System; _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory); @@ -78,12 +81,14 @@ public sealed class RuBduConnector : IFeedConnector { ArgumentNullException.ThrowIfNull(services); + _diagnostics.FetchAttempt(); + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); var pendingDocuments = cursor.PendingDocuments.ToHashSet(); var pendingMappings = cursor.PendingMappings.ToHashSet(); var now = _timeProvider.GetUtcNow(); - SourceFetchContentResult archiveResult = default; + SourceFetchContentResult? archiveResult = null; byte[]? 
archiveContent = null; var usedCache = false; @@ -100,18 +105,20 @@ public sealed class RuBduConnector : IFeedConnector TimeoutOverride = _options.RequestTimeout, }; - archiveResult = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + var fetchResult = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + archiveResult = fetchResult; - if (archiveResult.IsNotModified) + if (fetchResult.IsNotModified) { _logger.LogDebug("RU-BDU archive not modified."); + _diagnostics.FetchUnchanged(); await UpdateCursorAsync(cursor.WithLastSuccessfulFetch(now), cancellationToken).ConfigureAwait(false); return; } - if (archiveResult.IsSuccess && archiveResult.Content is not null) + if (fetchResult.IsSuccess && fetchResult.Content is not null) { - archiveContent = archiveResult.Content; + archiveContent = fetchResult.Content; TryWriteCachedArchive(archiveContent); } } @@ -122,9 +129,11 @@ public sealed class RuBduConnector : IFeedConnector _logger.LogWarning(ex, "RU-BDU archive fetch failed; using cached artefact {CachePath}", _archiveCachePath); archiveContent = cachedFallback; usedCache = true; + _diagnostics.FetchCacheFallback(); } else { + _diagnostics.FetchFailure(); _logger.LogError(ex, "RU-BDU archive fetch failed for {ArchiveUri}", _options.DataArchiveUri); await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); throw; @@ -135,19 +144,23 @@ public sealed class RuBduConnector : IFeedConnector { if (TryReadCachedArchive(out var cachedFallback)) { - _logger.LogWarning("RU-BDU archive unavailable (status={Status}); using cached artefact {CachePath}", archiveResult.StatusCode, _archiveCachePath); + var status = archiveResult?.StatusCode; + _logger.LogWarning("RU-BDU archive unavailable (status={Status}); using cached artefact {CachePath}", status, _archiveCachePath); archiveContent = cachedFallback; usedCache = true; + _diagnostics.FetchCacheFallback(); } else { - _logger.LogWarning("RU-BDU archive fetch returned no content (status={Status})", archiveResult.StatusCode); + var status = archiveResult?.StatusCode; + _logger.LogWarning("RU-BDU archive fetch returned no content (status={Status})", status); + _diagnostics.FetchSuccess(addedCount: 0, usedCache: false); await UpdateCursorAsync(cursor.WithLastSuccessfulFetch(now), cancellationToken).ConfigureAwait(false); return; } } - var archiveLastModified = archiveResult.LastModified; + var archiveLastModified = archiveResult?.LastModified; int added; try { @@ -155,14 +168,22 @@ public sealed class RuBduConnector : IFeedConnector } catch (Exception ex) { - if (!usedCache) - { - _logger.LogError(ex, "RU-BDU archive processing failed"); - await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); - } + _diagnostics.FetchFailure(); + _logger.LogError(ex, "RU-BDU archive processing failed"); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); throw; } + _diagnostics.FetchSuccess(added, usedCache); + if (added > 0) + { + _logger.LogInformation("RU-BDU processed {Added} vulnerabilities (cacheUsed={CacheUsed})", added, usedCache); + } + else + { + _logger.LogDebug("RU-BDU fetch completed with no new vulnerabilities (cacheUsed={CacheUsed})", usedCache); + } + var updatedCursor = cursor .WithPendingDocuments(pendingDocuments) 
.WithPendingMappings(pendingMappings) @@ -191,6 +212,7 @@ public sealed class RuBduConnector : IFeedConnector var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); if (document is null) { + _diagnostics.ParseFailure(); pendingDocuments.Remove(documentId); pendingMappings.Remove(documentId); continue; @@ -199,6 +221,7 @@ public sealed class RuBduConnector : IFeedConnector if (!document.GridFsId.HasValue) { _logger.LogWarning("RU-BDU document {DocumentId} missing GridFS payload", documentId); + _diagnostics.ParseFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingDocuments.Remove(documentId); pendingMappings.Remove(documentId); @@ -213,6 +236,7 @@ public sealed class RuBduConnector : IFeedConnector catch (Exception ex) { _logger.LogError(ex, "RU-BDU unable to download raw document {DocumentId}", documentId); + _diagnostics.ParseFailure(); throw; } @@ -224,6 +248,7 @@ public sealed class RuBduConnector : IFeedConnector catch (Exception ex) { _logger.LogWarning(ex, "RU-BDU failed to deserialize document {DocumentId}", documentId); + _diagnostics.ParseFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingDocuments.Remove(documentId); pendingMappings.Remove(documentId); @@ -233,6 +258,7 @@ public sealed class RuBduConnector : IFeedConnector if (dto is null) { _logger.LogWarning("RU-BDU document {DocumentId} produced null DTO", documentId); + _diagnostics.ParseFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingDocuments.Remove(documentId); pendingMappings.Remove(documentId); @@ -243,6 +269,10 @@ public sealed class RuBduConnector : IFeedConnector var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "ru-bdu.v1", bson, _timeProvider.GetUtcNow()); await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + _diagnostics.ParseSuccess( + dto.Software.IsDefaultOrEmpty ? 0 : dto.Software.Length, + dto.Identifiers.IsDefaultOrEmpty ? 0 : dto.Identifiers.Length, + dto.Sources.IsDefaultOrEmpty ? 
0 : dto.Sources.Length); pendingDocuments.Remove(documentId); if (!pendingMappings.Contains(documentId)) @@ -277,6 +307,7 @@ public sealed class RuBduConnector : IFeedConnector var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); if (document is null) { + _diagnostics.MapFailure(); pendingMappings.Remove(documentId); continue; } @@ -285,6 +316,7 @@ public sealed class RuBduConnector : IFeedConnector if (dtoRecord is null) { _logger.LogWarning("RU-BDU document {DocumentId} missing DTO payload", documentId); + _diagnostics.MapFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingMappings.Remove(documentId); continue; @@ -298,6 +330,7 @@ public sealed class RuBduConnector : IFeedConnector catch (Exception ex) { _logger.LogError(ex, "RU-BDU failed to deserialize DTO for document {DocumentId}", documentId); + _diagnostics.MapFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingMappings.Remove(documentId); continue; @@ -308,11 +341,13 @@ public sealed class RuBduConnector : IFeedConnector var advisory = RuBduMapper.Map(dto, document, dtoRecord.ValidatedAt); await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + _diagnostics.MapSuccess(advisory); pendingMappings.Remove(documentId); } catch (Exception ex) { _logger.LogError(ex, "RU-BDU mapping failed for document {DocumentId}", documentId); + _diagnostics.MapFailure(); await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); pendingMappings.Remove(documentId); } diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduServiceCollectionExtensions.cs index d87a44f4..c35b843c 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduServiceCollectionExtensions.cs +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/RuBduServiceCollectionExtensions.cs @@ -1,8 +1,9 @@ using System.Net; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Options; -using StellaOps.Feedser.Source.Ru.Bdu.Configuration; using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Ru.Bdu.Configuration; +using StellaOps.Feedser.Source.Ru.Bdu.Internal; namespace StellaOps.Feedser.Source.Ru.Bdu; @@ -36,6 +37,7 @@ public static class RuBduServiceCollectionExtensions }; }); + services.AddSingleton(); services.AddTransient(); return services; diff --git a/src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md b/src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md index 53d28c0e..36119b42 100644 --- a/src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md +++ b/src/StellaOps.Feedser.Source.Ru.Bdu/TASKS.md @@ -2,10 +2,10 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-RUBDU-02-001 Identify BDU data source & schema|BE-Conn-BDU|Research|**DONE (2025-10-11)** – Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml`, `...?format=json`) return 403/404 even with `--insecure` because TLS chain requires Russian Trusted Sub CA and WAF expects referer/session headers. 
Documented request/response samples in `docs/feedser-connector-research-20251011.md`; blocked until trusted root + access strategy from Ops.| -|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**DOING (2025-10-12)** – Fetch job now expands `vulxml.zip` into per-advisory JSON documents with cursor tracking + trust store wiring (`certificates/russian_trusted_*`). Parser/mapper emit canonical advisories; next up is wiring fixtures, regression tests, and telemetry before closing the task.| -|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**DOING (2025-10-12)** – `RuBduXmlParser` materialises per-entry DTOs and serialises them into Mongo DTO records; remaining work covers resilience fixtures and edge-case coverage (multi-CWE, empty software lists).| -|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**DOING (2025-10-12)** – `RuBduMapper` produces canonical advisories (aliases, references, vendor packages, CVSS). Follow-up: refine status translation + range primitives once richer samples arrive; ensure fixtures cover environment/other metadata before marking DONE.| -|FEEDCONN-RUBDU-02-005 Deterministic fixtures & regression tests|QA|Testing|**TODO** – Add fetch/parse/map tests with fixtures; support `UPDATE_BDU_FIXTURES=1`.| -|FEEDCONN-RUBDU-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics, document connector configuration, close backlog when complete.| -|FEEDCONN-RUBDU-02-007 Access & export options assessment|BE-Conn-BDU|Research|**TODO** – Once access unblocked, compare RSS/Atom (if restored) vs HTML table export (`/vul` list) and legacy CSV dumps. Need to confirm whether login/anti-bot tokens required and outline offline mirroring plan (one-time tarball seeded into Offline Kit).| -|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**DOING (2025-10-12)** – Mirrored official Russian Trusted Root/Sub CA PEMs from rostelecom.ru (`certificates/russian_trusted_root_ca.pem`, `certificates/russian_trusted_sub_ca.pem`, bundle `certificates/russian_trusted_bundle.pem`) and validated TLS handshake. Next: confirm packaging guidance for Offline Kit + config samples using `feedser:httpClients:source.bdu:trustedRootPaths`.| +|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – Connector streams `vulxml.zip` through cached fetches, persists JSON payloads via `RawDocumentStorage`, and tracks cursor pending sets. 
Added cache fallback + deterministic SHA logging and state updates tied to `TimeProvider`.| +|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – `RuBduXmlParser` now captures identifiers, source links, CVSS 2/3 metrics, CWE arrays, and environment/software metadata with coverage for multi-entry fixtures.| +|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**DONE (2025-10-14)** – `RuBduMapper` emits vendor/ICS packages with normalized `ru-bdu.raw` rules, dual status provenance, alias/reference hydration (CVE, external, source), and CVSS severity normalisation.| +|FEEDCONN-RUBDU-02-005 Deterministic fixtures & regression tests|QA|Testing|**DONE (2025-10-14)** – Added connector harness snapshot suite with canned archive, state/documents/dtos/advisories snapshots under `Fixtures/`, gated by `UPDATE_BDU_FIXTURES`.| +|FEEDCONN-RUBDU-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** – Introduced `RuBduDiagnostics` meter (fetch/parse/map counters & histograms) and authored connector README covering configuration, trusted roots, telemetry, and offline behaviour.| +|FEEDCONN-RUBDU-02-007 Access & export options assessment|BE-Conn-BDU|Research|**DONE (2025-10-14)** – Documented archive access constraints, offline mirroring expectations, and export packaging in `src/StellaOps.Feedser.Source.Ru.Bdu/README.md` + flagged Offline Kit bundling requirements.| +|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – Validated Russian Trusted Root/Sub CA bundle wiring (`certificates/russian_trusted_bundle.pem`), updated Offline Kit guidance, and surfaced `feedser:httpClients:source.bdu:trustedRootPaths` sample configuration.| diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip new file mode 100644 index 00000000..efbbfe36 Binary files /dev/null and b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip differ diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip index 52ef47b2..c116033c 100644 Binary files a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip and b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip differ diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing-page2.html b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing-page2.html new file mode 100644 index 00000000..8067dfba --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing-page2.html @@ -0,0 +1,7 @@ + + + + + diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing.html b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing.html index 56e04c36..abd43946 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing.html +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/listing.html @@ -3,5 +3,8 @@ + diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json index f382b68c..56c113aa 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json @@ 
-3,11 +3,57 @@ "advisoryKey": "BDU:2025-01001", "affectedPackages": [ { - "type": "vendor", - "identifier": "SampleSCADA <= 4.2", - "platform": null, - "versionRanges": [], - "normalizedVersions": [], + "type": "ics-vendor", + "identifier": "SampleVendor SampleGateway", + "platform": "Energy, ICS", + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": "2.0", + "lastAffectedVersion": null, + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": null, + "semVer": { + "constraintExpression": ">= 2.0", + "exactValue": null, + "fixed": null, + "fixedInclusive": false, + "introduced": "2.0", + "introducedInclusive": true, + "lastAffected": null, + "lastAffectedInclusive": false, + "style": "greaterThanOrEqual" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ru-nkcki", + "kind": "package-range", + "value": "SampleVendor SampleGateway >= 2.0 All platforms", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": ">= 2.0", + "rangeKind": "semver" + } + ], + "normalizedVersions": [ + { + "scheme": "semver", + "type": "gte", + "min": "2.0", + "minInclusive": true, + "max": null, + "maxInclusive": null, + "value": null, + "notes": "SampleVendor SampleGateway >= 2.0 All platforms" + } + ], "statuses": [ { "provenance": { @@ -15,7 +61,7 @@ "kind": "package-status", "value": "patch_available", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "affectedpackages[].statuses[]" ] @@ -27,9 +73,89 @@ { "source": "ru-nkcki", "kind": "package", - "value": "SampleSCADA <= 4.2", + "value": "SampleVendor SampleGateway >= 2.0 All platforms", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + } + ] + }, + { + "type": "ics-vendor", + "identifier": "SampleVendor SampleSCADA", + "platform": "Energy, ICS", + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": null, + "lastAffectedVersion": "4.2", + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": null, + "semVer": { + "constraintExpression": "<= 4.2", + "exactValue": null, + "fixed": null, + "fixedInclusive": false, + "introduced": null, + "introducedInclusive": true, + "lastAffected": "4.2", + "lastAffectedInclusive": true, + "style": "lessThanOrEqual" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ru-nkcki", + "kind": "package-range", + "value": "SampleVendor SampleSCADA <= 4.2", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": "<= 4.2", + "rangeKind": "semver" + } + ], + "normalizedVersions": [ + { + "scheme": "semver", + "type": "lte", + "min": null, + "minInclusive": null, + "max": "4.2", + "maxInclusive": true, + "value": null, + "notes": "SampleVendor SampleSCADA <= 4.2" + } + ], + "statuses": [ + { + "provenance": { + "source": "ru-nkcki", + "kind": "package-status", + "value": "patch_available", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "ru-nkcki", + "kind": "package", + "value": "SampleVendor SampleSCADA <= 4.2", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ 
"affectedpackages[]" ] @@ -51,13 +177,29 @@ "kind": "cvss", "value": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "cvssmetrics[]" ] }, "vector": "CVSS:3.1/AV:N/AC:H/PR:L/UI:N/S:C/C:H/I:H/A:H", "version": "3.1" + }, + { + "baseScore": 6.4, + "baseSeverity": "medium", + "provenance": { + "source": "ru-nkcki", + "kind": "cvss", + "value": "CVSS:4.0/AV:N/AC:H/AT:N/PR:L/UI:N/VC:H/VI:H/VA:H", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:4.0/AV:N/AC:H/AT:N/PR:L/UI:N/VC:H/VI:H/VA:H", + "version": "4.0" } ], "exploitKnown": true, @@ -69,7 +211,7 @@ "kind": "advisory", "value": "BDU:2025-01001", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "advisory" ] @@ -84,7 +226,7 @@ "kind": "reference", "value": "https://bdu.fstec.ru/vul/2025-01001", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "references[]" ] @@ -100,23 +242,7 @@ "kind": "reference", "value": "https://cert.gov.ru/materialy/uyazvimosti/2025-01001", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", - "fieldMask": [ - "references[]" - ] - }, - "sourceTag": null, - "summary": null, - "url": "https://cert.gov.ru/materialy/uyazvimosti/2025-01001" - }, - { - "kind": "details", - "provenance": { - "source": "ru-nkcki", - "kind": "reference", - "value": "https://cert.gov.ru/materialy/uyazvimosti/2025-01001", - "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "references[]" ] @@ -132,7 +258,7 @@ "kind": "reference", "value": "https://cwe.mitre.org/data/definitions/321.html", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "references[]" ] @@ -148,7 +274,7 @@ "kind": "reference", "value": "https://vendor.example/advisories/sample-scada", "decisionReason": null, - "recordedAt": "2025-09-22T00:00:00+00:00", + "recordedAt": "2025-10-12T00:01:00+00:00", "fieldMask": [ "references[]" ] @@ -161,5 +287,209 @@ "severity": "critical", "summary": "Authenticated RCE in Sample SCADA", "title": "Authenticated RCE in Sample SCADA" + }, + { + "advisoryKey": "BDU:2024-00011", + "affectedPackages": [ + { + "type": "cpe", + "identifier": "LegacyPanel", + "platform": "Software", + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": null, + "lastAffectedVersion": "2.5", + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": null, + "semVer": { + "constraintExpression": "<= 2.5", + "exactValue": null, + "fixed": null, + "fixedInclusive": false, + "introduced": null, + "introducedInclusive": true, + "lastAffected": "2.5", + "lastAffectedInclusive": true, + "style": "lessThanOrEqual" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ru-nkcki", + "kind": "package-range", + "value": "LegacyPanel 1.0 - 2.5", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": "<= 2.5", + "rangeKind": "semver" + }, + { + "fixedVersion": null, + "introducedVersion": "1.0", + "lastAffectedVersion": null, + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": 
null, + "semVer": { + "constraintExpression": ">= 1.0", + "exactValue": null, + "fixed": null, + "fixedInclusive": false, + "introduced": "1.0", + "introducedInclusive": true, + "lastAffected": null, + "lastAffectedInclusive": false, + "style": "greaterThanOrEqual" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ru-nkcki", + "kind": "package-range", + "value": "LegacyPanel 1.0 - 2.5", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": ">= 1.0", + "rangeKind": "semver" + } + ], + "normalizedVersions": [ + { + "scheme": "semver", + "type": "gte", + "min": "1.0", + "minInclusive": true, + "max": null, + "maxInclusive": null, + "value": null, + "notes": "LegacyPanel 1.0 - 2.5" + }, + { + "scheme": "semver", + "type": "lte", + "min": null, + "minInclusive": null, + "max": "2.5", + "maxInclusive": true, + "value": null, + "notes": "LegacyPanel 1.0 - 2.5" + } + ], + "statuses": [ + { + "provenance": { + "source": "ru-nkcki", + "kind": "package-status", + "value": "affected", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "affected" + } + ], + "provenance": [ + { + "source": "ru-nkcki", + "kind": "package", + "value": "LegacyPanel 1.0 - 2.5", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + } + ] + } + ], + "aliases": [ + "BDU:2024-00011" + ], + "credits": [], + "cvssMetrics": [ + { + "baseScore": 8.8, + "baseSeverity": "high", + "provenance": { + "source": "ru-nkcki", + "kind": "cvss", + "value": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "exploitKnown": true, + "language": "ru", + "modified": "2024-08-02T00:00:00+00:00", + "provenance": [ + { + "source": "ru-nkcki", + "kind": "advisory", + "value": "BDU:2024-00011", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "advisory" + ] + } + ], + "published": "2024-08-01T00:00:00+00:00", + "references": [ + { + "kind": "details", + "provenance": { + "source": "ru-nkcki", + "kind": "reference", + "value": "https://bdu.fstec.ru/vul/2024-00011", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "bdu", + "summary": null, + "url": "https://bdu.fstec.ru/vul/2024-00011" + }, + { + "kind": "details", + "provenance": { + "source": "ru-nkcki", + "kind": "reference", + "value": "https://cert.gov.ru/materialy/uyazvimosti/2024-00011", + "decisionReason": null, + "recordedAt": "2025-10-12T00:01:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "ru-nkcki", + "summary": null, + "url": "https://cert.gov.ru/materialy/uyazvimosti/2024-00011" + } + ], + "severity": "high", + "summary": "Legacy panel overflow", + "title": "Legacy panel overflow" } ] \ No newline at end of file diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs index 84f1c4f9..d4d4eae2 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs @@ -33,7 +33,9 @@ namespace 
StellaOps.Feedser.Source.Ru.Nkcki.Tests; public sealed class RuNkckiConnectorTests : IAsyncLifetime { private static readonly Uri ListingUri = new("https://cert.gov.ru/materialy/uyazvimosti/"); + private static readonly Uri ListingPage2Uri = new("https://cert.gov.ru/materialy/uyazvimosti/?PAGEN_1=2"); private static readonly Uri BulletinUri = new("https://cert.gov.ru/materialy/uyazvimosti/bulletin-sample.json.zip"); + private static readonly Uri LegacyBulletinUri = new("https://cert.gov.ru/materialy/uyazvimosti/bulletin-legacy.json.zip"); private readonly MongoIntegrationFixture _fixture; private readonly FakeTimeProvider _timeProvider; @@ -60,13 +62,13 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime var advisoryStore = provider.GetRequiredService(); var advisories = await advisoryStore.GetRecentAsync(10, CancellationToken.None); - Assert.Single(advisories); + Assert.Equal(2, advisories.Count); var snapshot = SnapshotSerializer.ToSnapshot(advisories); WriteOrAssertSnapshot(snapshot, "nkcki-advisories.snapshot.json"); var documentStore = provider.GetRequiredService(); - var document = await documentStore.FindBySourceAndUriAsync(RuNkckiConnectorPlugin.SourceName, "https://cert.gov.ru/materialy/uyazvimosti/BDU:2025-01001", CancellationToken.None); + var document = await documentStore.FindBySourceAndUriAsync(RuNkckiConnectorPlugin.SourceName, "https://cert.gov.ru/materialy/uyazvimosti/2025-01001", CancellationToken.None); Assert.NotNull(document); Assert.Equal(DocumentStatuses.Mapped, document!.Status); @@ -85,18 +87,25 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime var connector = provider.GetRequiredService(); await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); _handler.Clear(); - _handler.AddResponse(ListingUri, () => new HttpResponseMessage(HttpStatusCode.InternalServerError) + for (var i = 0; i < 3; i++) { - Content = new StringContent("error", Encoding.UTF8, "text/plain"), - }); + _handler.AddResponse(ListingUri, () => new HttpResponseMessage(HttpStatusCode.InternalServerError) + { + Content = new StringContent("error", Encoding.UTF8, "text/plain"), + }); + } var advisoryStore = provider.GetRequiredService(); var before = await advisoryStore.GetRecentAsync(10, CancellationToken.None); - Assert.NotEmpty(before); + Assert.Equal(2, before.Count); await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); var after = await advisoryStore.GetRecentAsync(10, CancellationToken.None); Assert.Equal(before.Select(advisory => advisory.AdvisoryKey).OrderBy(static key => key), after.Select(advisory => advisory.AdvisoryKey).OrderBy(static key => key)); @@ -106,18 +115,7 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime private async Task BuildServiceProviderAsync() { - try - { - await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); - } - catch (MongoConnectionException ex) - { - Assert.Skip($"Mongo runner unavailable: {ex.Message}"); - } - catch (TimeoutException ex) - { - Assert.Skip($"Mongo runner unavailable: {ex.Message}"); - } + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); _handler.Clear(); @@ -138,7 +136,9 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime options.BaseAddress = new 
Uri("https://cert.gov.ru/"); options.ListingPath = "/materialy/uyazvimosti/"; options.MaxBulletinsPerFetch = 2; + options.MaxListingPagesPerFetch = 2; options.MaxVulnerabilitiesPerFetch = 50; + options.ListingCacheDuration = TimeSpan.Zero; var cacheRoot = Path.Combine(Path.GetTempPath(), "stellaops-tests", _fixture.Database.DatabaseNamespace.DatabaseName); Directory.CreateDirectory(cacheRoot); options.CacheDirectory = Path.Combine(cacheRoot, "ru-nkcki"); @@ -150,23 +150,10 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime builderOptions.HttpMessageHandlerBuilderActions.Add(builder => builder.PrimaryHandler = _handler); }); - try - { - var provider = services.BuildServiceProvider(); - var bootstrapper = provider.GetRequiredService(); - await bootstrapper.InitializeAsync(CancellationToken.None); - return provider; - } - catch (MongoConnectionException ex) - { - Assert.Skip($"Mongo runner unavailable: {ex.Message}"); - throw; // Unreachable - } - catch (TimeoutException ex) - { - Assert.Skip($"Mongo runner unavailable: {ex.Message}"); - throw; - } + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; } private void SeedListingAndBulletin() @@ -174,6 +161,9 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime var listingHtml = ReadFixture("listing.html"); _handler.AddTextResponse(ListingUri, listingHtml, "text/html"); + var listingPage2Html = ReadFixture("listing-page2.html"); + _handler.AddTextResponse(ListingPage2Uri, listingPage2Html, "text/html"); + var bulletinBytes = ReadBulletinFixture("bulletin-sample.json.zip"); _handler.AddResponse(BulletinUri, () => { @@ -185,6 +175,18 @@ public sealed class RuNkckiConnectorTests : IAsyncLifetime response.Content.Headers.LastModified = new DateTimeOffset(2025, 9, 22, 0, 0, 0, TimeSpan.Zero); return response; }); + + var legacyBytes = ReadBulletinFixture("bulletin-legacy.json.zip"); + _handler.AddResponse(LegacyBulletinUri, () => + { + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(legacyBytes), + }; + response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/zip"); + response.Content.Headers.LastModified = new DateTimeOffset(2024, 8, 2, 0, 0, 0, TimeSpan.Zero); + return response; + }); } private static bool IsEmptyArray(BsonDocument document, string field) diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs index 90e739c2..1f9f18cb 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs @@ -18,14 +18,24 @@ public sealed class RuNkckiJsonParserTests "patch_available": true, "description": "Test description", "cwe": {"cwe_number": 79, "cwe_description": "Cross-site scripting"}, - "product_category": "Web", - "mitigation": "Apply update", - "vulnerable_software": {"software_text": "ExampleApp 1.0", "cpe": false}, - "cvss": {"cvss_score": 8.8, "cvss_vector": "AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", "cvss_score_v4": 5.5, "cvss_vector_v4": "AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H"}, + "product_category": ["Web", "CMS"], + "mitigation": ["Apply update", "Review configuration"], + "vulnerable_software": { + "software_text": "ExampleCMS <= 1.0", + "software": [{"vendor": "Example", "name": "ExampleCMS", "version": "<= 1.0"}], + "cpe": false + }, 
+ "cvss": { + "cvss_score": 8.8, + "cvss_vector": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", + "cvss_score_v4": 5.5, + "cvss_vector_v4": "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H" + }, "impact": "ACE", "method_of_exploitation": "Special request", "user_interaction": false, - "urls": ["https://example.com/advisory", "https://cert.gov.ru/materialy/uyazvimosti/2025-00001"] + "urls": ["https://example.com/advisory", {"url": "https://cert.gov.ru/materialy/uyazvimosti/2025-00001"}], + "tags": ["cms"] } """; @@ -35,9 +45,16 @@ public sealed class RuNkckiJsonParserTests Assert.Equal("BDU:2025-00001", dto.FstecId); Assert.Equal("CVE-2025-0001", dto.MitreId); Assert.Equal(8.8, dto.CvssScore); - Assert.Equal("AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", dto.CvssVector); + Assert.Equal("CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", dto.CvssVector); Assert.True(dto.PatchAvailable); Assert.Equal(79, dto.Cwe?.Number); + Assert.Contains("Web", dto.ProductCategories); + Assert.Contains("CMS", dto.ProductCategories); + Assert.Single(dto.VulnerableSoftwareEntries); + var entry = dto.VulnerableSoftwareEntries[0]; + Assert.Equal("Example ExampleCMS", entry.Identifier); + Assert.Contains("<= 1.0", entry.RangeExpressions); Assert.Equal(2, dto.Urls.Length); + Assert.Contains("cms", dto.Tags); } } diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiMapperTests.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiMapperTests.cs index cd74a046..acb44e79 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiMapperTests.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki.Tests/RuNkckiMapperTests.cs @@ -14,6 +14,12 @@ public sealed class RuNkckiMapperTests [Fact] public void Map_ConstructsCanonicalAdvisory() { + var softwareEntries = ImmutableArray.Create( + new RuNkckiSoftwareEntry( + "SampleVendor SampleSCADA", + "SampleVendor SampleSCADA <= 4.2", + ImmutableArray.Create("<= 4.2"))); + var dto = new RuNkckiVulnerabilityDto( FstecId: "BDU:2025-00001", MitreId: "CVE-2025-0001", @@ -23,18 +29,20 @@ public sealed class RuNkckiMapperTests PatchAvailable: true, Description: "Test NKCKI vulnerability", Cwe: new RuNkckiCweDto(79, "Cross-site scripting"), - ProductCategory: "Web", + ProductCategories: ImmutableArray.Create("ICS", "Automation"), Mitigation: "Apply update", - VulnerableSoftwareText: "ExampleApp <= 1.0", + VulnerableSoftwareText: null, VulnerableSoftwareHasCpe: false, + VulnerableSoftwareEntries: softwareEntries, CvssScore: 8.8, - CvssVector: "AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", - CvssScoreV4: null, - CvssVectorV4: null, + CvssVector: "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", + CvssScoreV4: 6.4, + CvssVectorV4: "CVSS:4.0/AV:N/AC:H/AT:N/PR:L/UI:N/VC:H/VI:H/VA:H", Impact: "ACE", MethodOfExploitation: "Special request", UserInteraction: false, - Urls: ImmutableArray.Create("https://example.com/advisory")); + Urls: ImmutableArray.Create("https://example.com/advisory", "https://cert.gov.ru/materialy/uyazvimosti/2025-00001"), + Tags: ImmutableArray.Create("ics")); var document = new DocumentRecord( Guid.NewGuid(), @@ -62,7 +70,12 @@ public sealed class RuNkckiMapperTests Assert.Equal("critical", advisory.Severity); Assert.True(advisory.ExploitKnown); Assert.Single(advisory.AffectedPackages); - Assert.Single(advisory.CvssMetrics); + var package = advisory.AffectedPackages[0]; + Assert.Equal(AffectedPackageTypes.IcsVendor, package.Type); + Assert.Single(package.NormalizedVersions); + Assert.Equal(2, advisory.CvssMetrics.Length); + Assert.Contains(advisory.CvssMetrics, metric => 
metric.Version == "4.0"); + Assert.Equal("critical", advisory.Severity); Assert.Contains(advisory.References, reference => reference.Url.Contains("example.com", StringComparison.OrdinalIgnoreCase)); } } diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/Configuration/RuNkckiOptions.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/Configuration/RuNkckiOptions.cs index c46aa5bb..cf3a1f66 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/Configuration/RuNkckiOptions.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/Configuration/RuNkckiOptions.cs @@ -38,6 +38,11 @@ public sealed class RuNkckiOptions /// public int MaxBulletinsPerFetch { get; set; } = 5; + /// + /// Maximum number of listing pages visited per fetch cycle. + /// + public int MaxListingPagesPerFetch { get; set; } = 3; + /// /// Maximum number of vulnerabilities ingested per fetch cycle across all attachments. /// @@ -99,6 +104,11 @@ public sealed class RuNkckiOptions throw new InvalidOperationException("RuNkcki MaxBulletinsPerFetch must be greater than zero."); } + if (MaxListingPagesPerFetch <= 0) + { + throw new InvalidOperationException("RuNkcki MaxListingPagesPerFetch must be greater than zero."); + } + if (MaxVulnerabilitiesPerFetch <= 0) { throw new InvalidOperationException("RuNkcki MaxVulnerabilitiesPerFetch must be greater than zero."); diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs new file mode 100644 index 00000000..9c4fa9ba --- /dev/null +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal; + +/// +/// Emits telemetry counters for the NKCKI connector lifecycle. 
+/// +public sealed class RuNkckiDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.Ru.Nkcki"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + private readonly Counter _listingFetchAttempts; + private readonly Counter _listingFetchSuccess; + private readonly Counter _listingFetchFailures; + private readonly Histogram _listingPagesVisited; + private readonly Histogram _listingAttachmentsDiscovered; + private readonly Histogram _listingAttachmentsNew; + private readonly Counter _bulletinFetchSuccess; + private readonly Counter _bulletinFetchCached; + private readonly Counter _bulletinFetchFailures; + private readonly Histogram _entriesProcessed; + + public RuNkckiDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + _listingFetchAttempts = _meter.CreateCounter( + "nkcki.listing.fetch.attempts", + unit: "operations", + description: "Number of listing fetch attempts."); + _listingFetchSuccess = _meter.CreateCounter( + "nkcki.listing.fetch.success", + unit: "operations", + description: "Number of successful listing fetches."); + _listingFetchFailures = _meter.CreateCounter( + "nkcki.listing.fetch.failures", + unit: "operations", + description: "Number of listing fetch failures."); + _listingPagesVisited = _meter.CreateHistogram( + "nkcki.listing.pages.visited", + unit: "pages", + description: "Listing pages visited per fetch cycle."); + _listingAttachmentsDiscovered = _meter.CreateHistogram( + "nkcki.listing.attachments.discovered", + unit: "attachments", + description: "Attachments discovered across listing pages."); + _listingAttachmentsNew = _meter.CreateHistogram( + "nkcki.listing.attachments.new", + unit: "attachments", + description: "New bulletin attachments enqueued per fetch cycle."); + _bulletinFetchSuccess = _meter.CreateCounter( + "nkcki.bulletin.fetch.success", + unit: "operations", + description: "Number of bulletin downloads that succeeded."); + _bulletinFetchCached = _meter.CreateCounter( + "nkcki.bulletin.fetch.cached", + unit: "operations", + description: "Number of bulletins served from cache."); + _bulletinFetchFailures = _meter.CreateCounter( + "nkcki.bulletin.fetch.failures", + unit: "operations", + description: "Number of bulletin download failures."); + _entriesProcessed = _meter.CreateHistogram( + "nkcki.entries.processed", + unit: "entries", + description: "Number of vulnerability entries processed per bulletin."); + } + + public void ListingFetchAttempt() => _listingFetchAttempts.Add(1); + + public void ListingFetchSuccess(int pagesVisited, int attachmentsDiscovered, int attachmentsNew) + { + _listingFetchSuccess.Add(1); + if (pagesVisited >= 0) + { + _listingPagesVisited.Record(pagesVisited); + } + + if (attachmentsDiscovered >= 0) + { + _listingAttachmentsDiscovered.Record(attachmentsDiscovered); + } + + if (attachmentsNew >= 0) + { + _listingAttachmentsNew.Record(attachmentsNew); + } + } + + public void ListingFetchFailure(string reason) + => _listingFetchFailures.Add(1, ReasonTag(reason)); + + public void BulletinFetchSuccess() => _bulletinFetchSuccess.Add(1); + + public void BulletinFetchCached() => _bulletinFetchCached.Add(1); + + public void BulletinFetchFailure(string reason) + => _bulletinFetchFailures.Add(1, ReasonTag(reason)); + + public void EntriesProcessed(int count) + { + if (count >= 0) + { + _entriesProcessed.Record(count); + } + } + + private static KeyValuePair ReasonTag(string reason) + => new("reason", string.IsNullOrWhiteSpace(reason) ? 
"unknown" : reason.ToLowerInvariant()); + + public void Dispose() => _meter.Dispose(); +} diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiJsonParser.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiJsonParser.cs index f7abf429..aa706f83 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiJsonParser.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiJsonParser.cs @@ -1,16 +1,47 @@ +using System; +using System.Collections.Generic; using System.Collections.Immutable; -using System.Linq; using System.Globalization; +using System.Linq; using System.Text.Json; +using System.Text.RegularExpressions; namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal; internal static class RuNkckiJsonParser { + private static readonly Regex ComparatorRegex = new( + @"^(?.+?)\s*(?<=|>=|<|>|==|=)\s*(?.+?)$", + RegexOptions.Compiled | RegexOptions.CultureInvariant); + + private static readonly Regex RangeRegex = new( + @"^(?.+?)\s+(?[\p{L}\p{N}\._-]+)\s*[-–]\s*(?[\p{L}\p{N}\._-]+)$", + RegexOptions.Compiled | RegexOptions.CultureInvariant); + + private static readonly Regex QualifierRegex = new( + @"^(?.+?)\s+(?[\p{L}\p{N}\._-]+)\s+(?(and\s+earlier|and\s+later|and\s+newer|до\s+и\s+включительно|и\s+ниже|и\s+выше|и\s+старше|и\s+позже))$", + RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase); + + private static readonly Regex QualifierInlineRegex = new( + @"верс(ии|ия)\s+(?[\p{L}\p{N}\._-]+)\s+(?и\s+ниже|и\s+выше|и\s+старше)", + RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase); + + private static readonly Regex VersionWindowRegex = new( + @"верс(ии|ия)\s+(?[\p{L}\p{N}\._-]+)\s+по\s+(?[\p{L}\p{N}\._-]+)", + RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase); + + private static readonly char[] SoftwareSplitDelimiters = { '\n', ';', '\u2022', '\u2023', '\r' }; + + private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + public static RuNkckiVulnerabilityDto Parse(JsonElement element) { - var fstecId = element.TryGetProperty("vuln_id", out var vulnIdElement) && vulnIdElement.TryGetProperty("FSTEC", out var fstec) ? Normalize(fstec.GetString()) : null; - var mitreId = element.TryGetProperty("vuln_id", out vulnIdElement) && vulnIdElement.TryGetProperty("MITRE", out var mitre) ? Normalize(mitre.GetString()) : null; + var fstecId = element.TryGetProperty("vuln_id", out var vulnIdElement) && vulnIdElement.TryGetProperty("FSTEC", out var fstec) + ? Normalize(fstec.GetString()) + : null; + var mitreId = element.TryGetProperty("vuln_id", out vulnIdElement) && vulnIdElement.TryGetProperty("MITRE", out var mitre) + ? Normalize(mitre.GetString()) + : null; var datePublished = ParseDate(element.TryGetProperty("date_published", out var published) ? published.GetString() : null); var dateUpdated = ParseDate(element.TryGetProperty("date_updated", out var updated) ? updated.GetString() : null); @@ -22,12 +53,11 @@ internal static class RuNkckiJsonParser _ => null, } : null; - var description = Normalize(element.TryGetProperty("description", out var desc) ? desc.GetString() : null); - var mitigation = Normalize(element.TryGetProperty("mitigation", out var mitigationElement) ? mitigationElement.GetString() : null); - var productCategory = Normalize(element.TryGetProperty("product_category", out var category) ? category.GetString() : null); - var impact = Normalize(element.TryGetProperty("impact", out var impactElement) ? 
impactElement.GetString() : null); - var method = Normalize(element.TryGetProperty("method_of_exploitation", out var methodElement) ? methodElement.GetString() : null); - + var description = ReadJoinedString(element, "description"); + var mitigation = ReadJoinedString(element, "mitigation"); + var productCategories = ReadStringCollection(element, "product_category"); + var impact = ReadJoinedString(element, "impact"); + var method = ReadJoinedString(element, "method_of_exploitation"); bool? userInteraction = element.TryGetProperty("user_interaction", out var uiElement) ? uiElement.ValueKind switch { JsonValueKind.True => true, @@ -35,25 +65,7 @@ internal static class RuNkckiJsonParser _ => null, } : null; - string? softwareText = null; - bool? softwareHasCpe = null; - if (element.TryGetProperty("vulnerable_software", out var softwareElement)) - { - if (softwareElement.TryGetProperty("software_text", out var textElement)) - { - softwareText = Normalize(textElement.GetString()?.Replace('\r', ' ')); - } - - if (softwareElement.TryGetProperty("cpe", out var cpeElement)) - { - softwareHasCpe = cpeElement.ValueKind switch - { - JsonValueKind.True => true, - JsonValueKind.False => false, - _ => null, - }; - } - } + var (softwareText, softwareHasCpe, softwareEntries) = ParseVulnerableSoftware(element); RuNkckiCweDto? cweDto = null; if (element.TryGetProperty("cwe", out var cweElement)) @@ -71,7 +83,7 @@ internal static class RuNkckiJsonParser } } - var cweDescription = Normalize(cweElement.TryGetProperty("cwe_description", out var descElement) ? descElement.GetString() : null); + var cweDescription = ReadJoinedString(cweElement, "cwe_description") ?? Normalize(cweElement.GetString()); if (number.HasValue || !string.IsNullOrWhiteSpace(cweDescription)) { cweDto = new RuNkckiCweDto(number, cweDescription); @@ -91,13 +103,8 @@ internal static class RuNkckiJsonParser ? Normalize(vectorV4Element.GetString()) : null; - var urls = element.TryGetProperty("urls", out var urlsElement) && urlsElement.ValueKind == JsonValueKind.Array - ? urlsElement.EnumerateArray() - .Select(static url => Normalize(url.GetString())) - .Where(static url => !string.IsNullOrWhiteSpace(url)) - .Cast() - .ToImmutableArray() - : ImmutableArray.Empty; + var urls = ReadUrls(element); + var tags = ReadStringCollection(element, "tags"); return new RuNkckiVulnerabilityDto( fstecId, @@ -108,10 +115,11 @@ internal static class RuNkckiJsonParser patchAvailable, description, cweDto, - productCategory, + productCategories, mitigation, softwareText, softwareHasCpe, + softwareEntries, cvssScore, cvssVector, cvssScoreV4, @@ -119,7 +127,466 @@ internal static class RuNkckiJsonParser impact, method, userInteraction, - urls); + urls, + tags); + } + + private static ImmutableArray ReadUrls(JsonElement element) + { + if (!element.TryGetProperty("urls", out var urlsElement)) + { + return ImmutableArray.Empty; + } + + var collected = new List(); + CollectUrls(urlsElement, collected); + if (collected.Count == 0) + { + return ImmutableArray.Empty; + } + + return collected + .Select(Normalize) + .Where(static url => !string.IsNullOrWhiteSpace(url)) + .Select(static url => url!) 
+ .Distinct(OrdinalIgnoreCase) + .OrderBy(static url => url, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + } + + private static void CollectUrls(JsonElement element, ICollection results) + { + switch (element.ValueKind) + { + case JsonValueKind.String: + var value = element.GetString(); + if (!string.IsNullOrWhiteSpace(value)) + { + results.Add(value); + } + break; + case JsonValueKind.Array: + foreach (var child in element.EnumerateArray()) + { + CollectUrls(child, results); + } + break; + case JsonValueKind.Object: + if (element.TryGetProperty("url", out var urlProperty)) + { + CollectUrls(urlProperty, results); + } + + if (element.TryGetProperty("href", out var hrefProperty)) + { + CollectUrls(hrefProperty, results); + } + + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("value") || property.NameEquals("link")) + { + CollectUrls(property.Value, results); + } + } + break; + } + } + + private static string? ReadJoinedString(JsonElement element, string property) + { + if (!element.TryGetProperty(property, out var target)) + { + return null; + } + + var values = ReadStringCollection(target); + if (!values.IsDefaultOrEmpty) + { + return string.Join("; ", values); + } + + return Normalize(target.ValueKind == JsonValueKind.String ? target.GetString() : target.ToString()); + } + + private static ImmutableArray ReadStringCollection(JsonElement element, string property) + { + if (!element.TryGetProperty(property, out var target)) + { + return ImmutableArray.Empty; + } + + return ReadStringCollection(target); + } + + private static ImmutableArray ReadStringCollection(JsonElement element) + { + var builder = ImmutableArray.CreateBuilder(); + CollectStrings(element, builder); + return Deduplicate(builder); + } + + private static void CollectStrings(JsonElement element, ImmutableArray.Builder builder) + { + switch (element.ValueKind) + { + case JsonValueKind.String: + AddIfPresent(builder, Normalize(element.GetString())); + break; + case JsonValueKind.Number: + AddIfPresent(builder, Normalize(element.ToString())); + break; + case JsonValueKind.True: + builder.Add("true"); + break; + case JsonValueKind.False: + builder.Add("false"); + break; + case JsonValueKind.Array: + foreach (var child in element.EnumerateArray()) + { + CollectStrings(child, builder); + } + break; + case JsonValueKind.Object: + foreach (var property in element.EnumerateObject()) + { + CollectStrings(property.Value, builder); + } + break; + } + } + + private static ImmutableArray Deduplicate(ImmutableArray.Builder builder) + { + if (builder.Count == 0) + { + return ImmutableArray.Empty; + } + + return builder + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Distinct(OrdinalIgnoreCase) + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + } + + private static void AddIfPresent(ImmutableArray.Builder builder, string? value) + { + if (!string.IsNullOrWhiteSpace(value)) + { + builder.Add(value!); + } + } + + private static (string? Text, bool? HasCpe, ImmutableArray Entries) ParseVulnerableSoftware(JsonElement element) + { + if (!element.TryGetProperty("vulnerable_software", out var softwareElement)) + { + return (null, null, ImmutableArray.Empty); + } + + string? softwareText = null; + if (softwareElement.TryGetProperty("software_text", out var textElement)) + { + softwareText = Normalize(textElement.ValueKind == JsonValueKind.String ? textElement.GetString() : textElement.ToString()); + } + + bool? 
softwareHasCpe = null; + if (softwareElement.TryGetProperty("cpe", out var cpeElement)) + { + softwareHasCpe = cpeElement.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + _ => softwareHasCpe, + }; + } + + var entries = new List(); + if (softwareElement.TryGetProperty("software", out var softwareNodes)) + { + entries.AddRange(ParseSoftwareEntries(softwareNodes)); + } + + if (entries.Count == 0 && !string.IsNullOrWhiteSpace(softwareText)) + { + entries.AddRange(SplitSoftwareTextIntoEntries(softwareText)); + } + + if (entries.Count == 0) + { + foreach (var fallbackProperty in new[] { "items", "aliases", "software_lines" }) + { + if (softwareElement.TryGetProperty(fallbackProperty, out var fallbackNodes)) + { + entries.AddRange(ParseSoftwareEntries(fallbackNodes)); + } + } + } + + if (entries.Count == 0) + { + return (softwareText, softwareHasCpe, ImmutableArray.Empty); + } + + var grouped = entries + .GroupBy(static entry => entry.Identifier, OrdinalIgnoreCase) + .Select(static group => + { + var evidence = string.Join( + "; ", + group.Select(static entry => entry.Evidence) + .Where(static evidence => !string.IsNullOrWhiteSpace(evidence)) + .Distinct(OrdinalIgnoreCase)); + + var ranges = group + .SelectMany(static entry => entry.RangeExpressions) + .Where(static range => !string.IsNullOrWhiteSpace(range)) + .Distinct(OrdinalIgnoreCase) + .OrderBy(static range => range, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return new RuNkckiSoftwareEntry( + group.Key, + string.IsNullOrWhiteSpace(evidence) ? group.Key : evidence, + ranges); + }) + .OrderBy(static entry => entry.Identifier, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return (softwareText, softwareHasCpe, grouped); + } + + private static IEnumerable ParseSoftwareEntries(JsonElement element) + { + switch (element.ValueKind) + { + case JsonValueKind.Array: + foreach (var child in element.EnumerateArray()) + { + foreach (var entry in ParseSoftwareEntries(child)) + { + yield return entry; + } + } + break; + case JsonValueKind.Object: + yield return CreateEntryFromObject(element); + break; + case JsonValueKind.String: + foreach (var entry in SplitSoftwareTextIntoEntries(element.GetString() ?? string.Empty)) + { + yield return entry; + } + break; + } + } + + private static RuNkckiSoftwareEntry CreateEntryFromObject(JsonElement element) + { + var vendor = ReadFirstString(element, "vendor", "manufacturer", "organisation"); + var name = ReadFirstString(element, "name", "product", "title"); + var rawVersion = ReadFirstString(element, "version", "versions", "range"); + var comment = ReadFirstString(element, "comment", "notes", "summary"); + + var identifierParts = new List(); + if (!string.IsNullOrWhiteSpace(vendor)) + { + identifierParts.Add(vendor!); + } + + if (!string.IsNullOrWhiteSpace(name)) + { + identifierParts.Add(name!); + } + + var identifier = identifierParts.Count > 0 + ? string.Join(" ", identifierParts) + : ReadFirstString(element, "identifier") ?? name ?? rawVersion ?? comment ?? 
"unknown"; + + var evidenceParts = new List(identifierParts); + if (!string.IsNullOrWhiteSpace(rawVersion)) + { + evidenceParts.Add(rawVersion!); + } + + if (!string.IsNullOrWhiteSpace(comment)) + { + evidenceParts.Add(comment!); + } + + var evidence = string.Join(" ", evidenceParts.Where(static part => !string.IsNullOrWhiteSpace(part))).Trim(); + + var rangeHints = new List(); + if (!string.IsNullOrWhiteSpace(rawVersion)) + { + rangeHints.Add(rawVersion); + } + + if (element.TryGetProperty("range", out var rangeElement)) + { + rangeHints.Add(Normalize(rangeElement.ToString())); + } + + return CreateSoftwareEntry(identifier!, evidence, rangeHints); + } + + private static IEnumerable SplitSoftwareTextIntoEntries(string text) + { + if (string.IsNullOrWhiteSpace(text)) + { + yield break; + } + + var segments = text.Split(SoftwareSplitDelimiters, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + segments = new[] { text }; + } + + foreach (var segment in segments) + { + var normalized = Normalize(segment); + if (string.IsNullOrWhiteSpace(normalized)) + { + continue; + } + + var (identifier, hints) = ExtractIdentifierAndRangeHints(normalized!); + yield return CreateSoftwareEntry(identifier, normalized!, hints); + } + } + + private static RuNkckiSoftwareEntry CreateSoftwareEntry(string identifier, string evidence, IEnumerable hints) + { + var normalizedIdentifier = Normalize(identifier) ?? "unknown"; + var normalizedEvidence = Normalize(evidence) ?? normalizedIdentifier; + + var ranges = hints + .Select(NormalizeRangeHint) + .Where(static hint => !string.IsNullOrWhiteSpace(hint)) + .Select(static hint => hint!) + .Distinct(OrdinalIgnoreCase) + .OrderBy(static hint => hint, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return new RuNkckiSoftwareEntry(normalizedIdentifier, normalizedEvidence!, ranges); + } + + private static string? NormalizeRangeHint(string? hint) + { + if (string.IsNullOrWhiteSpace(hint)) + { + return null; + } + + var normalized = Normalize(hint)? + .Replace("≤", "<=", StringComparison.Ordinal) + .Replace("≥", ">=", StringComparison.Ordinal) + .Replace("=>", ">=", StringComparison.Ordinal) + .Replace("=<", "<=", StringComparison.Ordinal); + + if (string.IsNullOrWhiteSpace(normalized)) + { + return null; + } + + return normalized; + } + + private static (string Identifier, IReadOnlyList RangeHints) ExtractIdentifierAndRangeHints(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return ("unknown", Array.Empty()); + } + + var comparatorMatch = ComparatorRegex.Match(value); + if (comparatorMatch.Success) + { + var name = Normalize(comparatorMatch.Groups["name"].Value); + var version = Normalize(comparatorMatch.Groups["version"].Value); + var op = comparatorMatch.Groups["operator"].Value; + return (string.IsNullOrWhiteSpace(name) ? value : name!, new[] { $"{op} {version}" }); + } + + var rangeMatch = RangeRegex.Match(value); + if (rangeMatch.Success) + { + var name = Normalize(rangeMatch.Groups["name"].Value); + var start = Normalize(rangeMatch.Groups["start"].Value); + var end = Normalize(rangeMatch.Groups["end"].Value); + return (string.IsNullOrWhiteSpace(name) ? 
value : name!, new[] { $">= {start}", $"<= {end}" }); + } + + var qualifierMatch = QualifierRegex.Match(value); + if (qualifierMatch.Success) + { + var name = Normalize(qualifierMatch.Groups["name"].Value); + var version = Normalize(qualifierMatch.Groups["version"].Value); + var qualifier = qualifierMatch.Groups["qualifier"].Value.ToLowerInvariant(); + var hint = qualifier.Contains("ниж") || qualifier.Contains("earlier") || qualifier.Contains("включ") + ? $"<= {version}" + : $">= {version}"; + return (string.IsNullOrWhiteSpace(name) ? value : name!, new[] { hint }); + } + + var inlineQualifierMatch = QualifierInlineRegex.Match(value); + if (inlineQualifierMatch.Success) + { + var version = Normalize(inlineQualifierMatch.Groups["version"].Value); + var qualifier = inlineQualifierMatch.Groups["qualifier"].Value.ToLowerInvariant(); + var hint = qualifier.Contains("ниж") ? $"<= {version}" : $">= {version}"; + var name = Normalize(QualifierInlineRegex.Replace(value, string.Empty)); + return (string.IsNullOrWhiteSpace(name) ? value : name!, new[] { hint }); + } + + var windowMatch = VersionWindowRegex.Match(value); + if (windowMatch.Success) + { + var start = Normalize(windowMatch.Groups["start"].Value); + var end = Normalize(windowMatch.Groups["end"].Value); + var name = Normalize(VersionWindowRegex.Replace(value, string.Empty)); + return (string.IsNullOrWhiteSpace(name) ? value : name!, new[] { $">= {start}", $"<= {end}" }); + } + + return (value, Array.Empty()); + } + + private static string? ReadFirstString(JsonElement element, params string[] names) + { + foreach (var name in names) + { + if (element.TryGetProperty(name, out var property)) + { + switch (property.ValueKind) + { + case JsonValueKind.String: + { + var normalized = Normalize(property.GetString()); + if (!string.IsNullOrWhiteSpace(normalized)) + { + return normalized; + } + + break; + } + case JsonValueKind.Number: + { + var normalized = Normalize(property.ToString()); + if (!string.IsNullOrWhiteSpace(normalized)) + { + return normalized; + } + + break; + } + } + } + } + + return null; } private static double? ParseDouble(JsonElement element) @@ -164,6 +631,16 @@ internal static class RuNkckiJsonParser return null; } - return value.Replace('\r', ' ').Replace('\n', ' ').Trim(); + var normalized = value + .Replace('\r', ' ') + .Replace('\n', ' ') + .Trim(); + + while (normalized.Contains(" ", StringComparison.Ordinal)) + { + normalized = normalized.Replace(" ", " ", StringComparison.Ordinal); + } + + return normalized.Length == 0 ? null : normalized; } } diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiMapper.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiMapper.cs index ad348ca9..51a57b85 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiMapper.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiMapper.cs @@ -4,6 +4,7 @@ using System.Globalization; using System.Linq; using StellaOps.Feedser.Models; using StellaOps.Feedser.Normalization.Cvss; +using StellaOps.Feedser.Normalization.SemVer; using StellaOps.Feedser.Storage.Mongo.Documents; namespace StellaOps.Feedser.Source.Ru.Nkcki.Internal; @@ -80,56 +81,56 @@ internal static class RuNkckiMapper private static IReadOnlyList BuildReferences(RuNkckiVulnerabilityDto dto, DocumentRecord document, DateTimeOffset recordedAt) { - var references = new List + var references = new List(); + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + + void AddReference(string? url, string kind, string? sourceTag, string? 
summary) { - new(document.Uri, "details", "ru-nkcki", summary: null, new AdvisoryProvenance( + if (string.IsNullOrWhiteSpace(url)) + { + return; + } + + var key = $"{kind}|{url}"; + if (!seen.Add(key)) + { + return; + } + + var provenance = new AdvisoryProvenance( RuNkckiConnectorPlugin.SourceName, "reference", - document.Uri, + url, recordedAt, - new[] { ProvenanceFieldMasks.References })) - }; + new[] { ProvenanceFieldMasks.References }); + + references.Add(new AdvisoryReference(url, kind, sourceTag, summary, provenance)); + } + + AddReference(document.Uri, "details", "ru-nkcki", null); if (!string.IsNullOrWhiteSpace(dto.FstecId)) { var slug = dto.FstecId!.Contains(':', StringComparison.Ordinal) ? dto.FstecId[(dto.FstecId.IndexOf(':') + 1)..] : dto.FstecId; - var bduUrl = $"https://bdu.fstec.ru/vul/{slug}"; - references.Add(new AdvisoryReference(bduUrl, "details", "bdu", summary: null, new AdvisoryProvenance( - RuNkckiConnectorPlugin.SourceName, - "reference", - bduUrl, - recordedAt, - new[] { ProvenanceFieldMasks.References }))); + AddReference($"https://bdu.fstec.ru/vul/{slug}", "details", "bdu", null); } foreach (var url in dto.Urls) { - if (string.IsNullOrWhiteSpace(url)) - { - continue; - } - var kind = url.Contains("cert.gov.ru", StringComparison.OrdinalIgnoreCase) ? "details" : "external"; var sourceTag = url.Contains("siemens", StringComparison.OrdinalIgnoreCase) ? "vendor" : null; - references.Add(new AdvisoryReference(url, kind, sourceTag, summary: null, new AdvisoryProvenance( - RuNkckiConnectorPlugin.SourceName, - "reference", - url, - recordedAt, - new[] { ProvenanceFieldMasks.References }))); + AddReference(url, kind, sourceTag, null); } if (dto.Cwe?.Number is int number) { - var url = $"https://cwe.mitre.org/data/definitions/{number}.html"; - references.Add(new AdvisoryReference(url, "cwe", "cwe", dto.Cwe.Description, new AdvisoryProvenance( - RuNkckiConnectorPlugin.SourceName, - "reference", - url, - recordedAt, - new[] { ProvenanceFieldMasks.References }))); + AddReference( + $"https://cwe.mitre.org/data/definitions/{number}.html", + "cwe", + "cwe", + dto.Cwe.Description); } return references; @@ -137,43 +138,68 @@ internal static class RuNkckiMapper private static IReadOnlyList BuildPackages(RuNkckiVulnerabilityDto dto, DateTimeOffset recordedAt) { - if (string.IsNullOrWhiteSpace(dto.VulnerableSoftwareText)) + if (!dto.VulnerableSoftwareEntries.IsDefaultOrEmpty && dto.VulnerableSoftwareEntries.Length > 0) { - return Array.Empty(); + return CreatePackages(dto.VulnerableSoftwareEntries, dto, recordedAt); } - var identifier = dto.VulnerableSoftwareText!.Replace('\n', ' ').Replace('\r', ' ').Trim(); - if (identifier.Length == 0) + if (!string.IsNullOrWhiteSpace(dto.VulnerableSoftwareText)) { - return Array.Empty(); + var fallbackEntry = new RuNkckiSoftwareEntry( + dto.VulnerableSoftwareText!, + dto.VulnerableSoftwareText!, + ImmutableArray.Empty); + return CreatePackages(new[] { fallbackEntry }, dto, recordedAt); } - var packageProvenance = new AdvisoryProvenance( - RuNkckiConnectorPlugin.SourceName, - "package", - identifier, - recordedAt, - new[] { ProvenanceFieldMasks.AffectedPackages }); + return Array.Empty(); + } - var status = new AffectedPackageStatus( - dto.PatchAvailable == true ? 
AffectedPackageStatusCatalog.Fixed : AffectedPackageStatusCatalog.Affected, - new AdvisoryProvenance( + private static IReadOnlyList CreatePackages(IEnumerable entries, RuNkckiVulnerabilityDto dto, DateTimeOffset recordedAt) + { + var type = DeterminePackageType(dto); + var platform = dto.ProductCategories.IsDefaultOrEmpty || dto.ProductCategories.Length == 0 + ? null + : string.Join(", ", dto.ProductCategories); + + var packages = new List(); + + foreach (var entry in entries) + { + if (string.IsNullOrWhiteSpace(entry.Identifier)) + { + continue; + } + + var packageProvenance = new AdvisoryProvenance( RuNkckiConnectorPlugin.SourceName, - "package-status", - dto.PatchAvailable == true ? "patch_available" : "affected", + "package", + entry.Evidence, recordedAt, - new[] { ProvenanceFieldMasks.PackageStatuses })); + new[] { ProvenanceFieldMasks.AffectedPackages }); - return new[] - { - new AffectedPackage( - dto.VulnerableSoftwareHasCpe == true ? AffectedPackageTypes.Cpe : AffectedPackageTypes.Vendor, - identifier, - platform: null, - versionRanges: null, - statuses: new[] { status }, - provenance: new[] { packageProvenance }) - }; + var status = new AffectedPackageStatus( + dto.PatchAvailable == true ? AffectedPackageStatusCatalog.Fixed : AffectedPackageStatusCatalog.Affected, + new AdvisoryProvenance( + RuNkckiConnectorPlugin.SourceName, + "package-status", + dto.PatchAvailable == true ? "patch_available" : "affected", + recordedAt, + new[] { ProvenanceFieldMasks.PackageStatuses })); + + var rangeMetadata = BuildRangeMetadata(entry, recordedAt); + + packages.Add(new AffectedPackage( + type, + entry.Identifier, + platform, + rangeMetadata.Ranges, + new[] { status }, + new[] { packageProvenance }, + rangeMetadata.Normalized)); + } + + return packages; } private static IReadOnlyList BuildCvssMetrics(RuNkckiVulnerabilityDto dto, DateTimeOffset recordedAt, out string? severity) @@ -194,6 +220,27 @@ internal static class RuNkckiMapper severity ??= metric.BaseSeverity; } + if (!string.IsNullOrWhiteSpace(dto.CvssVectorV4)) + { + var vector = dto.CvssVectorV4.StartsWith("CVSS:", StringComparison.OrdinalIgnoreCase) + ? dto.CvssVectorV4 + : $"CVSS:4.0/{dto.CvssVectorV4}"; + var score = dto.CvssScoreV4.HasValue + ? 
Math.Round(dto.CvssScoreV4.Value, 1, MidpointRounding.AwayFromZero) + : 0.0; + var severityV4 = DetermineCvss4Severity(score); + + var provenance = new AdvisoryProvenance( + RuNkckiConnectorPlugin.SourceName, + "cvss", + vector, + recordedAt, + new[] { ProvenanceFieldMasks.CvssMetrics }); + + metrics.Add(new CvssMetric("4.0", vector, score, severityV4, provenance)); + severity ??= severityV4; + } + return metrics; } @@ -295,4 +342,104 @@ internal static class RuNkckiMapper return false; } + + private static string DeterminePackageType(RuNkckiVulnerabilityDto dto) + { + if (dto.VulnerableSoftwareHasCpe == true) + { + return AffectedPackageTypes.Cpe; + } + + if (!dto.ProductCategories.IsDefault && dto.ProductCategories.Any(static category => + category.Contains("ics", StringComparison.OrdinalIgnoreCase) + || category.Contains("scada", StringComparison.OrdinalIgnoreCase))) + { + return AffectedPackageTypes.IcsVendor; + } + + return AffectedPackageTypes.Vendor; + } + + private static (IReadOnlyList Ranges, IReadOnlyList Normalized) BuildRangeMetadata( + RuNkckiSoftwareEntry entry, + DateTimeOffset recordedAt) + { + if (entry.RangeExpressions.IsDefaultOrEmpty || entry.RangeExpressions.Length == 0) + { + return (Array.Empty(), Array.Empty()); + } + + var ranges = new List(); + var normalized = new List(); + var dedupe = new HashSet(StringComparer.Ordinal); + + foreach (var expression in entry.RangeExpressions) + { + if (string.IsNullOrWhiteSpace(expression)) + { + continue; + } + + var results = SemVerRangeRuleBuilder.Build(expression, provenanceNote: entry.Evidence); + if (results.Count == 0) + { + continue; + } + + foreach (var result in results) + { + var key = $"{result.Primitive.Introduced}|{result.Primitive.Fixed}|{result.Primitive.LastAffected}|{result.Expression}"; + if (!dedupe.Add(key)) + { + continue; + } + + var provenance = new AdvisoryProvenance( + RuNkckiConnectorPlugin.SourceName, + "package-range", + entry.Evidence, + recordedAt, + new[] { ProvenanceFieldMasks.VersionRanges }); + + ranges.Add(new AffectedVersionRange( + NormalizedVersionSchemes.SemVer, + result.Primitive.Introduced, + result.Primitive.Fixed, + result.Primitive.LastAffected, + result.Expression, + provenance, + new RangePrimitives(result.Primitive, null, null, null))); + + normalized.Add(result.NormalizedRule); + } + } + + return ( + ranges.Count == 0 ? Array.Empty() : ranges, + normalized.Count == 0 ? Array.Empty() : normalized); + } + private static string DetermineCvss4Severity(double score) + { + if (score <= 0.0) + { + return "none"; + } + + if (score < 4.0) + { + return "low"; + } + + if (score < 7.0) + { + return "medium"; + } + + if (score < 9.0) + { + return "high"; + } + + return "critical"; + } } diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs index 14b7da32..a4e006f1 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs @@ -12,10 +12,11 @@ internal sealed record RuNkckiVulnerabilityDto( bool? PatchAvailable, string? Description, RuNkckiCweDto? Cwe, - string? ProductCategory, + ImmutableArray ProductCategories, string? Mitigation, string? VulnerableSoftwareText, bool? VulnerableSoftwareHasCpe, + ImmutableArray VulnerableSoftwareEntries, double? CvssScore, string? CvssVector, double? CvssScoreV4, @@ -23,7 +24,8 @@ internal sealed record RuNkckiVulnerabilityDto( string? 
Impact, string? MethodOfExploitation, bool? UserInteraction, - ImmutableArray Urls) + ImmutableArray Urls, + ImmutableArray Tags) { [JsonIgnore] public string AdvisoryKey => !string.IsNullOrWhiteSpace(FstecId) @@ -34,3 +36,5 @@ internal sealed record RuNkckiVulnerabilityDto( } internal sealed record RuNkckiCweDto(int? Number, string? Description); + +internal sealed record RuNkckiSoftwareEntry(string Identifier, string Evidence, ImmutableArray RangeExpressions); diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiConnector.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiConnector.cs index fffe86ee..21be40c6 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiConnector.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiConnector.cs @@ -55,6 +55,7 @@ public sealed class RuNkckiConnector : IFeedConnector private readonly ISourceStateRepository _stateRepository; private readonly RuNkckiOptions _options; private readonly TimeProvider _timeProvider; + private readonly RuNkckiDiagnostics _diagnostics; private readonly ILogger _logger; private readonly string _cacheDirectory; @@ -68,6 +69,7 @@ public sealed class RuNkckiConnector : IFeedConnector IAdvisoryStore advisoryStore, ISourceStateRepository stateRepository, IOptions options, + RuNkckiDiagnostics diagnostics, TimeProvider? timeProvider, ILogger logger) { @@ -79,6 +81,7 @@ public sealed class RuNkckiConnector : IFeedConnector _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); _options.Validate(); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); _timeProvider = timeProvider ?? TimeProvider.System; _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); _cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory); @@ -98,28 +101,12 @@ public sealed class RuNkckiConnector : IFeedConnector var now = _timeProvider.GetUtcNow(); var processed = 0; - IReadOnlyList attachments = Array.Empty(); - - try + if (ShouldUseListingCache(cursor, now)) { - var listingResult = await FetchListingAsync(cancellationToken).ConfigureAwait(false); - if (!listingResult.IsSuccess || listingResult.Content is null) - { - _logger.LogWarning("NKCKI listing fetch returned no content (status={Status})", listingResult.StatusCode); - processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false); - await UpdateCursorAsync(cursor - .WithPendingDocuments(pendingDocuments) - .WithPendingMappings(pendingMappings) - .WithKnownBulletins(NormalizeBulletins(knownBulletins)) - .WithLastListingFetch(now), cancellationToken).ConfigureAwait(false); - return; - } + _logger.LogDebug( + "NKCKI listing fetch skipped (cache duration {CacheDuration:c}); processing cached bulletins only", + _options.ListingCacheDuration); - attachments = await ParseListingAsync(listingResult.Content, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) - { - _logger.LogWarning(ex, "NKCKI listing fetch failed; attempting cached bulletins"); processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false); await UpdateCursorAsync(cursor .WithPendingDocuments(pendingDocuments) @@ -129,9 +116,42 @@ public sealed class RuNkckiConnector : IFeedConnector return; } - if (attachments.Count == 0) + ListingFetchSummary listingSummary; + try { - _logger.LogDebug("NKCKI listing contained no bulletin attachments"); + listingSummary = await LoadListingAsync(cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) + { + _logger.LogWarning(ex, "NKCKI listing fetch failed; attempting cached bulletins"); + _diagnostics.ListingFetchFailure(ex.Message); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + + processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false); + await UpdateCursorAsync(cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithKnownBulletins(NormalizeBulletins(knownBulletins)) + .WithLastListingFetch(cursor.LastListingFetchAt ?? 
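// Note: on a failed listing fetch the cursor keeps its previous listing timestamp
// (falling back to "now" only on the very first run), so the listing-cache window
// is not artificially refreshed by an attempt that never reached the site.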
now), cancellationToken).ConfigureAwait(false); + return; + } + + var uniqueAttachments = listingSummary.Attachments + .GroupBy(static attachment => attachment.Id, StringComparer.OrdinalIgnoreCase) + .Select(static group => group.First()) + .OrderBy(static attachment => attachment.Id, StringComparer.OrdinalIgnoreCase) + .ToList(); + + var newAttachments = uniqueAttachments + .Where(attachment => !knownBulletins.Contains(attachment.Id)) + .Take(_options.MaxBulletinsPerFetch) + .ToList(); + + _diagnostics.ListingFetchSuccess(listingSummary.PagesVisited, uniqueAttachments.Count, newAttachments.Count); + + if (newAttachments.Count == 0) + { + _logger.LogDebug("NKCKI listing contained no new bulletin attachments"); processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false); await UpdateCursorAsync(cursor .WithPendingDocuments(pendingDocuments) @@ -141,20 +161,9 @@ public sealed class RuNkckiConnector : IFeedConnector return; } - var newAttachments = attachments - .Where(attachment => !knownBulletins.Contains(attachment.Id)) - .Take(_options.MaxBulletinsPerFetch) - .ToList(); - - if (newAttachments.Count == 0) - { - await UpdateCursorAsync(cursor - .WithPendingDocuments(pendingDocuments) - .WithPendingMappings(pendingMappings) - .WithKnownBulletins(NormalizeBulletins(knownBulletins)) - .WithLastListingFetch(now), cancellationToken).ConfigureAwait(false); - return; - } + var downloaded = 0; + var cachedUsed = 0; + var failures = 0; foreach (var attachment in newAttachments) { @@ -173,18 +182,24 @@ public sealed class RuNkckiConnector : IFeedConnector { if (TryReadCachedBulletin(attachment.Id, out var cachedBytes)) { + _diagnostics.BulletinFetchCached(); + cachedUsed++; _logger.LogWarning("NKCKI bulletin {BulletinId} unavailable (status={Status}); using cached artefact", attachment.Id, attachmentResult.StatusCode); processed = await ProcessBulletinEntriesAsync(cachedBytes, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false); knownBulletins.Add(attachment.Id); } else { + _diagnostics.BulletinFetchFailure(attachmentResult.StatusCode.ToString()); + failures++; _logger.LogWarning("NKCKI bulletin {BulletinId} returned no content (status={Status})", attachment.Id, attachmentResult.StatusCode); } continue; } + _diagnostics.BulletinFetchSuccess(); + downloaded++; TryWriteCachedBulletin(attachment.Id, attachmentResult.Content); processed = await ProcessBulletinEntriesAsync(attachmentResult.Content, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false); knownBulletins.Add(attachment.Id); @@ -193,12 +208,16 @@ public sealed class RuNkckiConnector : IFeedConnector { if (TryReadCachedBulletin(attachment.Id, out var cachedBytes)) { + _diagnostics.BulletinFetchCached(); + cachedUsed++; _logger.LogWarning(ex, "NKCKI bulletin fetch failed for {BulletinId}; using cached artefact", attachment.Id); processed = await ProcessBulletinEntriesAsync(cachedBytes, attachment.Id, pendingDocuments, pendingMappings, now, processed, cancellationToken).ConfigureAwait(false); knownBulletins.Add(attachment.Id); } else { + _diagnostics.BulletinFetchFailure(ex.Message); + failures++; _logger.LogWarning(ex, "NKCKI bulletin fetch failed for {BulletinId}", attachment.Id); await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); throw; @@ -223,6 +242,11 @@ 
public sealed class RuNkckiConnector : IFeedConnector } } + if (processed < _options.MaxVulnerabilitiesPerFetch) + { + processed = await ProcessCachedBulletinsAsync(pendingDocuments, pendingMappings, knownBulletins, now, processed, cancellationToken).ConfigureAwait(false); + } + var normalizedBulletins = NormalizeBulletins(knownBulletins); var updatedCursor = cursor @@ -232,6 +256,15 @@ public sealed class RuNkckiConnector : IFeedConnector .WithLastListingFetch(now); await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "NKCKI fetch complete: new bulletins {Downloaded}, cached bulletins {Cached}, failures {Failures}, processed entries {Processed}, pending documents {PendingDocuments}, pending mappings {PendingMappings}", + downloaded, + cachedUsed, + failures, + processed, + pendingDocuments.Count, + pendingMappings.Count); } public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) @@ -425,6 +458,7 @@ public sealed class RuNkckiConnector : IFeedConnector continue; } + _diagnostics.BulletinFetchCached(); updated = await ProcessBulletinEntriesAsync(content, bulletinId, pendingDocuments, pendingMappings, now, updated, cancellationToken).ConfigureAwait(false); knownBulletins.Add(bulletinId); @@ -484,6 +518,12 @@ public sealed class RuNkckiConnector : IFeedConnector } } + var delta = updated - processed; + if (delta > 0) + { + _diagnostics.EntriesProcessed(delta); + } + return updated; } @@ -607,34 +647,25 @@ public sealed class RuNkckiConnector : IFeedConnector return true; } - private async Task FetchListingAsync(CancellationToken cancellationToken) + private Task FetchListingPageAsync(Uri pageUri, CancellationToken cancellationToken) { - try + var request = new SourceFetchRequest(RuNkckiOptions.HttpClientName, SourceName, pageUri) { - var request = new SourceFetchRequest(RuNkckiOptions.HttpClientName, SourceName, _options.ListingUri) - { - AcceptHeaders = ListingAcceptHeaders, - TimeoutOverride = _options.RequestTimeout, - }; + AcceptHeaders = ListingAcceptHeaders, + TimeoutOverride = _options.RequestTimeout, + }; - return await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException) - { - _logger.LogError(ex, "NKCKI listing fetch failed for {ListingUri}", _options.ListingUri); - await _stateRepository.MarkFailureAsync(SourceName, _timeProvider.GetUtcNow(), _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); - throw; - } + return _fetchService.FetchContentAsync(request, cancellationToken); } - private async Task> ParseListingAsync(byte[] content, CancellationToken cancellationToken) + private async Task ParseListingAsync(Uri pageUri, byte[] content, CancellationToken cancellationToken) { var html = Encoding.UTF8.GetString(content); var document = await _htmlParser.ParseDocumentAsync(html, cancellationToken).ConfigureAwait(false); - var anchors = document.QuerySelectorAll("a[href$='.json.zip']"); - var attachments = new List(); - foreach (var anchor in anchors) + var pagination = new List(); + + foreach (var anchor in document.QuerySelectorAll("a[href$='.json.zip']")) { var href = anchor.GetAttribute("href"); if (string.IsNullOrWhiteSpace(href)) @@ -642,7 +673,7 @@ public sealed class RuNkckiConnector : IFeedConnector continue; } - if (!Uri.TryCreate(_options.BaseAddress, href, out var absoluteUri)) + if (!Uri.TryCreate(pageUri, href, out var absoluteUri)) { 
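// Note: relative bulletin links are now resolved against the page that was actually
// fetched (pageUri) rather than the configured base address, so attachments linked
// from paginated listing pages (?PAGEN_1=, page=) still yield correct absolute URIs.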
continue; } @@ -662,7 +693,32 @@ public sealed class RuNkckiConnector : IFeedConnector attachments.Add(new BulletinAttachment(id, absoluteUri, title ?? id)); } - return attachments; + foreach (var anchor in document.QuerySelectorAll("a[href]")) + { + var href = anchor.GetAttribute("href"); + if (string.IsNullOrWhiteSpace(href)) + { + continue; + } + + if (!href.Contains("PAGEN", StringComparison.OrdinalIgnoreCase) + && !href.Contains("page=", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (Uri.TryCreate(pageUri, href, out var absoluteUri)) + { + pagination.Add(absoluteUri); + } + } + + var uniquePagination = pagination + .DistinctBy(static uri => uri.AbsoluteUri, StringComparer.OrdinalIgnoreCase) + .Take(_options.MaxListingPagesPerFetch) + .ToList(); + + return new ListingPageResult(attachments, uniquePagination); } private static string DeriveBulletinId(Uri uri) @@ -821,5 +877,70 @@ public sealed class RuNkckiConnector : IFeedConnector return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken); } + private readonly record struct ListingFetchSummary(IReadOnlyList Attachments, int PagesVisited); + + private readonly record struct ListingPageResult(IReadOnlyList Attachments, IReadOnlyList PaginationLinks); + private readonly record struct BulletinAttachment(string Id, Uri Uri, string Title); + + private bool ShouldUseListingCache(RuNkckiCursor cursor, DateTimeOffset now) + { + if (!cursor.LastListingFetchAt.HasValue) + { + return false; + } + + var age = now - cursor.LastListingFetchAt.Value; + return age < _options.ListingCacheDuration; + } + + private async Task LoadListingAsync(CancellationToken cancellationToken) + { + var attachments = new List(); + var visited = 0; + var visitedUris = new HashSet(StringComparer.OrdinalIgnoreCase); + var queue = new Queue(); + queue.Enqueue(_options.ListingUri); + + while (queue.Count > 0 && visited < _options.MaxListingPagesPerFetch) + { + cancellationToken.ThrowIfCancellationRequested(); + + var pageUri = queue.Dequeue(); + if (!visitedUris.Add(pageUri.AbsoluteUri)) + { + continue; + } + + _diagnostics.ListingFetchAttempt(); + + var listingResult = await FetchListingPageAsync(pageUri, cancellationToken).ConfigureAwait(false); + if (!listingResult.IsSuccess || listingResult.Content is null) + { + _diagnostics.ListingFetchFailure(listingResult.StatusCode.ToString()); + _logger.LogWarning("NKCKI listing page {ListingUri} returned no content (status={Status})", pageUri, listingResult.StatusCode); + continue; + } + + visited++; + + var page = await ParseListingAsync(pageUri, listingResult.Content, cancellationToken).ConfigureAwait(false); + attachments.AddRange(page.Attachments); + + foreach (var link in page.PaginationLinks) + { + if (!visitedUris.Contains(link.AbsoluteUri) && queue.Count + visitedUris.Count < _options.MaxListingPagesPerFetch) + { + queue.Enqueue(link); + } + } + + if (attachments.Count >= _options.MaxBulletinsPerFetch * 2) + { + break; + } + } + + return new ListingFetchSummary(attachments, visited); + } } diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs index 58f5cda9..4c7d58d8 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs @@ -1,8 +1,10 @@ using System.Net; using Microsoft.Extensions.DependencyInjection; +using 
Microsoft.Extensions.DependencyInjection.Extensions; using Microsoft.Extensions.Options; using StellaOps.Feedser.Source.Common.Http; using StellaOps.Feedser.Source.Ru.Nkcki.Configuration; +using StellaOps.Feedser.Source.Ru.Nkcki.Internal; namespace StellaOps.Feedser.Source.Ru.Nkcki; @@ -36,6 +38,7 @@ public static class RuNkckiServiceCollectionExtensions }; }); + services.TryAddSingleton(); services.AddTransient(); return services; diff --git a/src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md b/src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md index 739974a4..976bd795 100644 --- a/src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md +++ b/src/StellaOps.Feedser.Source.Ru.Nkcki/TASKS.md @@ -2,10 +2,10 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-NKCKI-02-001 Research NKTsKI advisory feeds|BE-Conn-Nkcki|Research|**DONE (2025-10-11)** – Candidate RSS locations (`https://cert.gov.ru/rss/advisories.xml`, `https://www.cert.gov.ru/...`) return 403/404 even with `Accept-Language: ru-RU` and `--insecure`; site is Bitrix-backed and expects Russian Trusted Sub CA plus session cookies. Logged packet captures + needed cert list in `docs/feedser-connector-research-20251011.md`; waiting on Ops for sanctioned trust bundle.| -|FEEDCONN-NKCKI-02-002 Fetch pipeline & state persistence|BE-Conn-Nkcki|Source.Common, Storage.Mongo|**DOING (2025-10-12)** – Listing fetch now expands `*.json.zip` bulletins into per-vulnerability JSON documents with cursor-tracked bulletin IDs and trust store wiring (`globalsign_r6_bundle.pem`). Parser/mapper emit canonical advisories; remaining work: strengthen pagination/backfill handling and add regression fixtures/telemetry. Offline cache helpers (ProcessCachedBulletinsAsync/TryReadCachedBulletin/TryWriteCachedBulletin) implemented.| -|FEEDCONN-NKCKI-02-003 DTO & parser implementation|BE-Conn-Nkcki|Source.Common|**DOING (2025-10-12)** – `RuNkckiJsonParser` extracts per-vulnerability JSON payloads (IDs, CVEs, CVSS, software text, URLs). TODO: extend coverage for optional fields (ICS categories, nested arrays) and add fixture snapshots.| -|FEEDCONN-NKCKI-02-004 Canonical mapping & range primitives|BE-Conn-Nkcki|Models|**DOING (2025-10-12)** – `RuNkckiMapper` maps JSON entries to canonical advisories (aliases, references, vendor package, CVSS). Next steps: enrich package parsing (`software_text` tokenisation), consider CVSS v4 metadata, and backfill provenance docs before closing the task.| -|FEEDCONN-NKCKI-02-005 Deterministic fixtures & tests|QA|Testing|**DOING (2025-10-12)** – Added mocked listing/bulletin regression harness (`RuNkckiConnectorTests`) with fixtures + snapshot writer. Test run currently blocked on Mongo2Go dependency (libcrypto.so.1.1 missing); follow-up required to get embedded mongod running in CI before marking DONE.| -|FEEDCONN-NKCKI-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics, document connector configuration, and close backlog entry after deliverable ships.| -|FEEDCONN-NKCKI-02-007 Archive ingestion strategy|BE-Conn-Nkcki|Research|**TODO** – Once access restored, map Bitrix paging (`?PAGEN_1=`) and advisory taxonomy (alerts vs recommendations). 
Outline HTML scrape + PDF attachment handling for backfill and decide translation approach for Russian-only content.| +|FEEDCONN-NKCKI-02-002 Fetch pipeline & state persistence|BE-Conn-Nkcki|Source.Common, Storage.Mongo|**DONE (2025-10-13)** – Listing fetch now honours `maxListingPagesPerFetch`, persists cache hits when listing access fails, and records telemetry via `RuNkckiDiagnostics`. Cursor tracking covers pending documents/mappings and the known bulletin ring buffer.| +|FEEDCONN-NKCKI-02-003 DTO & parser implementation|BE-Conn-Nkcki|Source.Common|**DONE (2025-10-13)** – Parser normalises nested arrays (ICS categories, vulnerable software lists, optional tags), flattens multiline `software_text`, and guarantees deterministic ordering for URLs and tags.| +|FEEDCONN-NKCKI-02-004 Canonical mapping & range primitives|BE-Conn-Nkcki|Models|**DONE (2025-10-13)** – Mapper splits structured software entries, emits SemVer range primitives + normalized rules, deduplicates references, and surfaces CVSS v4 metadata alongside existing metrics.| +|FEEDCONN-NKCKI-02-005 Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-13)** – Fixtures refreshed with multi-page pagination + multi-entry bulletins. Tests exercise cache replay and rely on bundled OpenSSL 1.1 libs in `tools/openssl/linux-x64` to keep Mongo2Go green on modern distros.| +|FEEDCONN-NKCKI-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-13)** – Added connector-specific metrics (`nkcki.*`) and documented configuration/operational guidance in `docs/ops/feedser-nkcki-operations.md`.| +|FEEDCONN-NKCKI-02-007 Archive ingestion strategy|BE-Conn-Nkcki|Research|**DONE (2025-10-13)** – Documented Bitrix pagination/backfill plan (cache-first, offline replay, HTML/PDF capture) in `docs/ops/feedser-nkcki-operations.md`.| |FEEDCONN-NKCKI-02-008 Access enablement plan|BE-Conn-Nkcki|Source.Common|**DONE (2025-10-11)** – Documented trust-store requirement, optional SOCKS proxy fallback, and monitoring plan; shared TLS support now available via `SourceHttpClientOptions.TrustedRootCertificates` (`feedser:httpClients:source.nkcki:*`), awaiting Ops-sourced cert bundle before fetch implementation.| diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs new file mode 100644 index 00000000..e0384e04 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs @@ -0,0 +1,73 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Feedser.Source.Vndr.Cisco.Internal; +using Xunit; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Tests; + +public sealed class CiscoDtoFactoryTests +{ + [Fact] + public async Task CreateAsync_MergesRawAndCsafProducts() + { + const string CsafPayload = @" +{ + ""product_tree"": { + ""full_product_names"": [ + { ""product_id"": ""PID-1"", ""name"": ""Cisco Widget"" } + ] + }, + ""vulnerabilities"": [ + { + ""product_status"": { + ""known_affected"": [""PID-1""] + } + } + ] +}"; + + var csafClient = new StubCsafClient(CsafPayload); + var factory = new CiscoDtoFactory(csafClient, NullLogger.Instance); + + var raw = new CiscoRawAdvisory + { + AdvisoryId = "CISCO-SA-TEST", + AdvisoryTitle = "Test Advisory", + Summary = "Summary", + Sir = "High", + FirstPublished = "2025-10-01T00:00:00Z", + LastUpdated = "2025-10-02T00:00:00Z", + PublicationUrl = 
"https://example.com/advisory", + CsafUrl = "https://sec.cloudapps.cisco.com/csaf/test.json", + Cves = new List { "CVE-2024-0001" }, + BugIds = new List { "BUG123" }, + ProductNames = new List { "Cisco Widget" }, + Version = "1.2.3", + CvssBaseScore = "9.8" + }; + + var dto = await factory.CreateAsync(raw, CancellationToken.None); + + dto.Should().NotBeNull(); + dto.Severity.Should().Be("high"); + dto.CvssBaseScore.Should().Be(9.8); + dto.Products.Should().HaveCount(1); + var product = dto.Products[0]; + product.Name.Should().Be("Cisco Widget"); + product.ProductId.Should().Be("PID-1"); + product.Statuses.Should().Contain("known_affected"); + } + + private sealed class StubCsafClient : ICiscoCsafClient + { + private readonly string? _payload; + + public StubCsafClient(string? payload) => _payload = payload; + + public Task TryFetchAsync(string? url, CancellationToken cancellationToken) + => Task.FromResult(_payload); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoMapperTests.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoMapperTests.cs new file mode 100644 index 00000000..b117ce25 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/CiscoMapperTests.cs @@ -0,0 +1,79 @@ +using System; +using System.Collections.Generic; +using FluentAssertions; +using MongoDB.Bson; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Vndr.Cisco; +using StellaOps.Feedser.Source.Vndr.Cisco.Internal; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using Xunit; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Tests; + +public sealed class CiscoMapperTests +{ + [Fact] + public void Map_ProducesCanonicalAdvisory() + { + var published = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero); + var updated = published.AddDays(1); + + var dto = new CiscoAdvisoryDto( + AdvisoryId: "CISCO-SA-TEST", + Title: "Test Advisory", + Summary: "Sample summary", + Severity: "High", + Published: published, + Updated: updated, + PublicationUrl: "https://example.com/advisory", + CsafUrl: "https://sec.cloudapps.cisco.com/csaf/test.json", + CvrfUrl: "https://example.com/cvrf.xml", + CvssBaseScore: 9.8, + Cves: new List { "CVE-2024-0001" }, + BugIds: new List { "BUG123" }, + Products: new List + { + new("Cisco Widget", "PID-1", "1.2.3", new [] { AffectedPackageStatusCatalog.KnownAffected }) + }); + + var document = new DocumentRecord( + Id: Guid.NewGuid(), + SourceName: VndrCiscoConnectorPlugin.SourceName, + Uri: "https://api.cisco.com/security/advisories/v2/advisories/CISCO-SA-TEST", + FetchedAt: published, + Sha256: "abc123", + Status: DocumentStatuses.PendingMap, + ContentType: "application/json", + Headers: null, + Metadata: null, + Etag: null, + LastModified: updated, + GridFsId: null); + + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, VndrCiscoConnectorPlugin.SourceName, "cisco.dto.test", new BsonDocument(), updated); + + var advisory = CiscoMapper.Map(dto, document, dtoRecord); + + advisory.AdvisoryKey.Should().Be("CISCO-SA-TEST"); + advisory.Title.Should().Be("Test Advisory"); + advisory.Severity.Should().Be("high"); + advisory.Aliases.Should().Contain(new[] { "CISCO-SA-TEST", "CVE-2024-0001", "BUG123" }); + advisory.References.Should().Contain(reference => reference.Url == "https://example.com/advisory"); + advisory.References.Should().Contain(reference => reference.Url == "https://sec.cloudapps.cisco.com/csaf/test.json"); + 
advisory.AffectedPackages.Should().HaveCount(1); + + var package = advisory.AffectedPackages[0]; + package.Type.Should().Be(AffectedPackageTypes.Vendor); + package.Identifier.Should().Be("Cisco Widget"); + package.Statuses.Should().ContainSingle(status => status.Status == AffectedPackageStatusCatalog.KnownAffected); + package.VersionRanges.Should().ContainSingle(); + var range = package.VersionRanges[0]; + range.RangeKind.Should().Be("semver"); + range.Provenance.Source.Should().Be(VndrCiscoConnectorPlugin.SourceName); + range.Primitives.Should().NotBeNull(); + range.Primitives!.SemVer.Should().NotBeNull(); + range.Primitives.SemVer!.ExactValue.Should().Be("1.2.3"); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/StellaOps.Feedser.Source.Vndr.Cisco.Tests.csproj b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/StellaOps.Feedser.Source.Vndr.Cisco.Tests.csproj new file mode 100644 index 00000000..eaaa91d9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco.Tests/StellaOps.Feedser.Source.Vndr.Cisco.Tests.csproj @@ -0,0 +1,17 @@ + + + net10.0 + enable + enable + + + + + + + + + + + + diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoConnector.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoConnector.cs new file mode 100644 index 00000000..52b800db --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoConnector.cs @@ -0,0 +1,601 @@ +using System.Globalization; +using System.Linq; +using System.Security.Cryptography; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Vndr.Cisco.Configuration; +using StellaOps.Feedser.Source.Vndr.Cisco.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Vndr.Cisco; + +public sealed class CiscoConnector : IFeedConnector +{ + private const string DtoSchemaVersion = "cisco.dto.v1"; + + private static readonly JsonSerializerOptions RawSerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + }; + + private static readonly JsonSerializerOptions DtoSerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNameCaseInsensitive = true, + }; + + private static readonly IReadOnlyDictionary DefaultHeaders = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["content-type"] = "application/json", + }; + + private readonly CiscoOpenVulnClient _openVulnClient; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly CiscoDtoFactory _dtoFactory; + private readonly CiscoDiagnostics _diagnostics; + private readonly IOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public CiscoConnector( + CiscoOpenVulnClient openVulnClient, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + 
CiscoDtoFactory dtoFactory, + CiscoDiagnostics diagnostics, + IOptions options, + TimeProvider? timeProvider, + ILogger logger) + { + _openVulnClient = openVulnClient ?? throw new ArgumentNullException(nameof(openVulnClient)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _dtoFactory = dtoFactory ?? throw new ArgumentNullException(nameof(dtoFactory)); + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => VndrCiscoConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var now = _timeProvider.GetUtcNow(); + var options = _options.Value; + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + + var latestModified = cursor.LastModified; + var latestAdvisoryId = cursor.LastAdvisoryId; + + var startDate = DetermineStartDate(cursor, now, options); + var endDate = DateOnly.FromDateTime(now.UtcDateTime.Date); + + var added = 0; + var pagesFetched = 0; + + try + { + for (var date = startDate; date <= endDate; date = date.AddDays(1)) + { + cancellationToken.ThrowIfCancellationRequested(); + + for (var pageIndex = 1; pageIndex <= options.MaxPagesPerFetch; pageIndex++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var page = await _openVulnClient.FetchAsync(date, pageIndex, cancellationToken).ConfigureAwait(false); + pagesFetched++; + + if (page is null || page.Advisories.Count == 0) + { + break; + } + + foreach (var advisory in page.Advisories + .OrderBy(static a => a.LastUpdated ?? DateTimeOffset.MinValue) + .ThenBy(static a => a.AdvisoryId, StringComparer.OrdinalIgnoreCase)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!ShouldProcess(advisory, latestModified, latestAdvisoryId)) + { + continue; + } + + var documentUri = BuildDocumentUri(advisory.AdvisoryId); + var payload = advisory.GetRawBytes(); + var sha = ComputeSha256(payload); + + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false); + if (existing is not null && string.Equals(existing.Sha256, sha, StringComparison.OrdinalIgnoreCase)) + { + _diagnostics.FetchUnchanged(); + continue; + } + + ObjectId gridFsId; + try + { + gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", cancellationToken).ConfigureAwait(false); + } + catch (MongoWriteException ex) + { + _diagnostics.FetchFailure(); + _logger.LogError(ex, "Failed to upload Cisco advisory {AdvisoryId} to GridFS", advisory.AdvisoryId); + throw; + } + + var recordId = existing?.Id ?? 
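// Reuse the stored document id when this advisory URI was fetched before, so a changed
// SHA-256 updates the existing record in place instead of inserting a duplicate.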
Guid.NewGuid(); + var record = new DocumentRecord( + recordId, + SourceName, + documentUri, + now, + sha, + DocumentStatuses.PendingParse, + "application/json", + DefaultHeaders, + BuildMetadata(advisory), + Etag: null, + LastModified: advisory.LastUpdated ?? advisory.FirstPublished ?? now, + GridFsId: gridFsId, + ExpiresAt: null); + + var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + pendingDocuments.Add(upserted.Id); + pendingMappings.Remove(upserted.Id); + added++; + _diagnostics.FetchDocument(); + + if (advisory.LastUpdated.HasValue) + { + latestModified = advisory.LastUpdated; + latestAdvisoryId = advisory.AdvisoryId; + } + + if (added >= options.MaxAdvisoriesPerFetch) + { + break; + } + } + + if (added >= options.MaxAdvisoriesPerFetch) + { + break; + } + + if (!page.HasMore) + { + break; + } + + await DelayAsync(options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + + if (added >= options.MaxAdvisoriesPerFetch) + { + break; + } + } + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings); + + if (latestModified.HasValue) + { + updatedCursor = updatedCursor.WithCheckpoint(latestModified.Value, latestAdvisoryId ?? string.Empty); + } + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Cisco fetch completed startDate={StartDate} pages={PagesFetched} added={Added} lastUpdated={LastUpdated} lastAdvisoryId={LastAdvisoryId}", + startDate, + pagesFetched, + added, + latestModified, + latestAdvisoryId); + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException or MongoException) + { + _diagnostics.FetchFailure(); + _logger.LogError(ex, "Cisco fetch failed"); + await _stateRepository.MarkFailureAsync(SourceName, now, options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToList(); + var pendingMappings = cursor.PendingMappings.ToList(); + var parsed = 0; + var failures = 0; + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure(); + failures++; + continue; + } + + if (!document.GridFsId.HasValue) + { + _diagnostics.ParseFailure(); + _logger.LogWarning("Cisco document {DocumentId} missing GridFS payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogError(ex, "Cisco unable to download raw document {DocumentId}", documentId); + throw; + } + + CiscoRawAdvisory? 
raw; + try + { + raw = JsonSerializer.Deserialize(payload, RawSerializerOptions); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogWarning(ex, "Cisco failed to deserialize raw document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + if (raw is null) + { + _diagnostics.ParseFailure(); + _logger.LogWarning("Cisco raw document {DocumentId} produced null payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + CiscoAdvisoryDto dto; + try + { + dto = await _dtoFactory.CreateAsync(raw, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogWarning(ex, "Cisco failed to build DTO for {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + try + { + var dtoJson = JsonSerializer.Serialize(dto, DtoSerializerOptions); + var dtoBson = BsonDocument.Parse(dtoJson); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, DtoSchemaVersion, dtoBson, _timeProvider.GetUtcNow()); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + if (!pendingMappings.Contains(documentId)) + { + pendingMappings.Add(documentId); + } + + _diagnostics.ParseSuccess(); + parsed++; + } + catch (Exception ex) + { + _diagnostics.ParseFailure(); + _logger.LogError(ex, "Cisco failed to persist DTO for {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + } + } + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + if (parsed > 0 || failures > 0) + { + _logger.LogInformation( + "Cisco parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}", + parsed, + failures, + pendingDocuments.Count, + pendingMappings.Count); + } + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToList(); + var mapped = 0; + var failures = 0; + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingMappings.Remove(documentId); + _diagnostics.MapFailure(); + failures++; + continue; + } + + var 
dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + _diagnostics.MapFailure(); + _logger.LogWarning("Cisco document {DocumentId} missing DTO payload", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + CiscoAdvisoryDto? dto; + try + { + var json = dtoRecord.Payload.ToJson(); + dto = JsonSerializer.Deserialize(json, DtoSerializerOptions); + } + catch (Exception ex) + { + _diagnostics.MapFailure(); + _logger.LogWarning(ex, "Cisco failed to deserialize DTO for document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + if (dto is null) + { + _diagnostics.MapFailure(); + _logger.LogWarning("Cisco DTO for document {DocumentId} evaluated to null", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + try + { + var advisory = CiscoMapper.Map(dto, document, dtoRecord); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapSuccess(); + _diagnostics.MapAffected(advisory.AffectedPackages.Length); + mapped++; + } + catch (Exception ex) + { + _diagnostics.MapFailure(); + _logger.LogError(ex, "Cisco mapping failed for document {DocumentId}", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + } + } + + var updatedCursor = cursor.WithPendingMappings(pendingMappings); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + if (mapped > 0 || failures > 0) + { + _logger.LogInformation( + "Cisco map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}", + mapped, + failures, + pendingMappings.Count); + } + } + + private static string ComputeSha256(byte[] payload) + { + Span hash = stackalloc byte[32]; + SHA256.HashData(payload, hash); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static bool ShouldProcess(CiscoAdvisoryItem advisory, DateTimeOffset? checkpoint, string? 
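// Resume semantics: advisories updated strictly after the stored checkpoint are always
// processed; on a timestamp tie, only advisory ids ordered after the last processed id
// (ordinal, case-insensitive) are taken, and missing data errs on the side of processing.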
checkpointId) + { + if (checkpoint is null || advisory.LastUpdated is null) + { + return true; + } + + var comparison = advisory.LastUpdated.Value.CompareTo(checkpoint.Value); + if (comparison > 0) + { + return true; + } + + if (comparison < 0) + { + return false; + } + + if (string.IsNullOrWhiteSpace(checkpointId)) + { + return true; + } + + return string.Compare(advisory.AdvisoryId, checkpointId, StringComparison.OrdinalIgnoreCase) > 0; + } + + private static IReadOnlyDictionary BuildMetadata(CiscoAdvisoryItem advisory) + { + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["cisco.advisoryId"] = advisory.AdvisoryId, + }; + + if (advisory.LastUpdated.HasValue) + { + metadata["cisco.lastUpdated"] = advisory.LastUpdated.Value.ToString("O", CultureInfo.InvariantCulture); + } + + if (advisory.FirstPublished.HasValue) + { + metadata["cisco.firstPublished"] = advisory.FirstPublished.Value.ToString("O", CultureInfo.InvariantCulture); + } + + if (!string.IsNullOrWhiteSpace(advisory.Severity)) + { + metadata["cisco.severity"] = advisory.Severity!; + } + + if (!string.IsNullOrWhiteSpace(advisory.CsafUrl)) + { + metadata["cisco.csafUrl"] = advisory.CsafUrl!; + } + + if (!string.IsNullOrWhiteSpace(advisory.CvrfUrl)) + { + metadata["cisco.cvrfUrl"] = advisory.CvrfUrl!; + } + + if (!string.IsNullOrWhiteSpace(advisory.PublicationUrl)) + { + metadata["cisco.publicationUrl"] = advisory.PublicationUrl!; + } + + if (!string.IsNullOrWhiteSpace(advisory.CvssBaseScore)) + { + metadata["cisco.cvssBaseScore"] = advisory.CvssBaseScore!; + } + + if (advisory.Cves.Count > 0) + { + metadata["cisco.cves"] = string.Join(",", advisory.Cves); + } + + if (advisory.BugIds.Count > 0) + { + metadata["cisco.bugIds"] = string.Join(",", advisory.BugIds); + } + + if (advisory.ProductNames.Count > 0) + { + metadata["cisco.productNames"] = string.Join(";", advisory.ProductNames.Take(10)); + } + + return metadata; + } + + private static DateOnly DetermineStartDate(CiscoCursor cursor, DateTimeOffset now, CiscoOptions options) + { + if (cursor.LastModified.HasValue) + { + return DateOnly.FromDateTime(cursor.LastModified.Value.UtcDateTime.Date); + } + + var baseline = now - options.InitialBackfillWindow; + return DateOnly.FromDateTime(baseline.UtcDateTime.Date); + } + + private string BuildDocumentUri(string advisoryId) + { + var baseUri = _options.Value.BaseUri; + var relative = $"advisories/{Uri.EscapeDataString(advisoryId)}"; + return new Uri(baseUri, relative).ToString(); + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? 
CiscoCursor.Empty : CiscoCursor.FromBson(state.Cursor); + } + + private async Task UpdateCursorAsync(CiscoCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBson(); + await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); + } + + private static Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) + { + if (delay <= TimeSpan.Zero) + { + return Task.CompletedTask; + } + + return Task.Delay(delay, cancellationToken); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs new file mode 100644 index 00000000..e532a40d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs @@ -0,0 +1,53 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Vndr.Cisco; + +public sealed class CiscoDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:cisco"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddCiscoConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient(); + services.AddTransient(); + services.AddTransient(); + + services.PostConfigure(options => + { + EnsureJob(options, CiscoJobKinds.Fetch, typeof(CiscoFetchJob)); + EnsureJob(options, CiscoJobKinds.Parse, typeof(CiscoParseJob)); + EnsureJob(options, CiscoJobKinds.Map, typeof(CiscoMapJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoServiceCollectionExtensions.cs new file mode 100644 index 00000000..d38e2b06 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/CiscoServiceCollectionExtensions.cs @@ -0,0 +1,68 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Vndr.Cisco.Configuration; +using StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +namespace StellaOps.Feedser.Source.Vndr.Cisco; + +public static class CiscoServiceCollectionExtensions +{ + public static IServiceCollection AddCiscoConnector(this IServiceCollection services, Action configure) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configure); + + services.AddOptions() + .Configure(configure) + .PostConfigure(static opts => opts.Validate()) + .ValidateOnStart(); + + services.TryAddSingleton(_ => TimeProvider.System); + services.AddSingleton(); + services.AddSingleton(); + services.AddTransient(); + + 
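// Two HTTP pipelines are registered below: a dedicated auth client that talks only to the
// OAuth token endpoint, and the source-fetch client whose allow-list is restricted to the
// openVuln API host plus the Cisco hosts that serve advisory artefacts
// (sec.cloudapps.cisco.com, www.cisco.com).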
services.AddHttpClient(CiscoOptions.AuthHttpClientName) + .ConfigureHttpClient((sp, client) => + { + var options = sp.GetRequiredService>().Value; + client.Timeout = options.RequestTimeout; + client.DefaultRequestHeaders.UserAgent.ParseAdd("StellaOps.Feedser.Cisco/1.0"); + client.DefaultRequestHeaders.Accept.ParseAdd("application/json"); + if (options.TokenEndpoint is not null) + { + client.BaseAddress = new Uri(options.TokenEndpoint.GetLeftPart(UriPartial.Authority)); + } + }); + + services.AddSourceHttpClient(CiscoOptions.HttpClientName, static (sp, clientOptions) => + { + var options = sp.GetRequiredService>().Value; + clientOptions.Timeout = options.RequestTimeout; + clientOptions.UserAgent = "StellaOps.Feedser.Cisco/1.0"; + clientOptions.AllowedHosts.Clear(); + clientOptions.AllowedHosts.Add(options.BaseUri.Host); + clientOptions.AllowedHosts.Add("sec.cloudapps.cisco.com"); + clientOptions.AllowedHosts.Add("www.cisco.com"); + clientOptions.MaxAttempts = 5; + clientOptions.BaseDelay = TimeSpan.FromSeconds(2); + }).AddHttpMessageHandler(); + + services.AddSingleton(sp => + { + var fetchService = sp.GetRequiredService(); + var optionsMonitor = sp.GetRequiredService>(); + var logger = sp.GetRequiredService>(); + return new CiscoOpenVulnClient(fetchService, optionsMonitor, logger, VndrCiscoConnectorPlugin.SourceName); + }); + + services.AddSingleton(); + services.AddSingleton(); + services.AddTransient(); + return services; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Class1.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Class1.cs deleted file mode 100644 index 89524ed7..00000000 --- a/src/StellaOps.Feedser.Source.Vndr.Cisco/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.Vndr.Cisco; - -public sealed class VndrCiscoConnectorPlugin : IConnectorPlugin -{ - public string Name => "vndr-cisco"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Configuration/CiscoOptions.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Configuration/CiscoOptions.cs new file mode 100644 index 00000000..9b28458b --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Configuration/CiscoOptions.cs @@ -0,0 +1,124 @@ +using System.Globalization; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Configuration; + +public sealed class CiscoOptions +{ + public const string HttpClientName = "feedser.source.vndr.cisco"; + public const string AuthHttpClientName = "feedser.source.vndr.cisco.auth"; + + public Uri BaseUri { get; set; } = new("https://api.cisco.com/security/advisories/v2/", UriKind.Absolute); + + public Uri TokenEndpoint { get; set; } = new("https://id.cisco.com/oauth2/default/v1/token", UriKind.Absolute); + + public string ClientId { get; set; } = string.Empty; + + public string ClientSecret { get; set; } = 
string.Empty; + + public int PageSize { get; set; } = 100; + + public int MaxPagesPerFetch { get; set; } = 5; + + public int MaxAdvisoriesPerFetch { get; set; } = 200; + + public TimeSpan InitialBackfillWindow { get; set; } = TimeSpan.FromDays(30); + + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); + + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan TokenRefreshSkew { get; set; } = TimeSpan.FromMinutes(1); + + public string LastModifiedPathTemplate { get; set; } = "advisories/lastmodified/{0}"; + + public void Validate() + { + if (BaseUri is null || !BaseUri.IsAbsoluteUri) + { + throw new InvalidOperationException("Cisco BaseUri must be an absolute URI."); + } + + if (TokenEndpoint is null || !TokenEndpoint.IsAbsoluteUri) + { + throw new InvalidOperationException("Cisco TokenEndpoint must be an absolute URI."); + } + + if (string.IsNullOrWhiteSpace(ClientId)) + { + throw new InvalidOperationException("Cisco clientId must be configured."); + } + + if (string.IsNullOrWhiteSpace(ClientSecret)) + { + throw new InvalidOperationException("Cisco clientSecret must be configured."); + } + + if (PageSize is < 1 or > 100) + { + throw new InvalidOperationException("Cisco PageSize must be between 1 and 100."); + } + + if (MaxPagesPerFetch <= 0) + { + throw new InvalidOperationException("Cisco MaxPagesPerFetch must be greater than zero."); + } + + if (MaxAdvisoriesPerFetch <= 0) + { + throw new InvalidOperationException("Cisco MaxAdvisoriesPerFetch must be greater than zero."); + } + + if (InitialBackfillWindow <= TimeSpan.Zero) + { + throw new InvalidOperationException("Cisco InitialBackfillWindow must be positive."); + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Cisco RequestDelay cannot be negative."); + } + + if (RequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Cisco RequestTimeout must be positive."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException("Cisco FailureBackoff must be positive."); + } + + if (TokenRefreshSkew < TimeSpan.FromSeconds(5)) + { + throw new InvalidOperationException("Cisco TokenRefreshSkew must be at least 5 seconds."); + } + + if (string.IsNullOrWhiteSpace(LastModifiedPathTemplate)) + { + throw new InvalidOperationException("Cisco LastModifiedPathTemplate must be configured."); + } + } + + public Uri BuildLastModifiedUri(DateOnly date, int pageIndex, int pageSize) + { + if (pageIndex < 1) + { + throw new ArgumentOutOfRangeException(nameof(pageIndex), pageIndex, "Page index must be >= 1."); + } + + if (pageSize is < 1 or > 100) + { + throw new ArgumentOutOfRangeException(nameof(pageSize), pageSize, "Page size must be between 1 and 100."); + } + + var path = string.Format(CultureInfo.InvariantCulture, LastModifiedPathTemplate, date.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture)); + var builder = new UriBuilder(BaseUri); + var basePath = builder.Path.TrimEnd('/'); + builder.Path = $"{basePath}/{path}".Replace("//", "/", StringComparison.Ordinal); + var query = $"pageIndex={pageIndex.ToString(CultureInfo.InvariantCulture)}&pageSize={pageSize.ToString(CultureInfo.InvariantCulture)}"; + builder.Query = string.IsNullOrEmpty(builder.Query) ? 
query : builder.Query.TrimStart('?') + "&" + query; + return builder.Uri; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs new file mode 100644 index 00000000..dbf670e2 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs @@ -0,0 +1,145 @@ +using System.Net.Http.Headers; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Vndr.Cisco.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +internal sealed class CiscoAccessTokenProvider : IDisposable +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + }; + + private readonly IHttpClientFactory _httpClientFactory; + private readonly IOptionsMonitor _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly SemaphoreSlim _refreshLock = new(1, 1); + + private volatile AccessToken? _cached; + private bool _disposed; + + public CiscoAccessTokenProvider( + IHttpClientFactory httpClientFactory, + IOptionsMonitor options, + TimeProvider? timeProvider, + ILogger logger) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task GetTokenAsync(CancellationToken cancellationToken) + => await GetTokenInternalAsync(forceRefresh: false, cancellationToken).ConfigureAwait(false); + + public void Invalidate() + => _cached = null; + + private async Task GetTokenInternalAsync(bool forceRefresh, CancellationToken cancellationToken) + { + ThrowIfDisposed(); + + var options = _options.CurrentValue; + var now = _timeProvider.GetUtcNow(); + var cached = _cached; + if (!forceRefresh && cached is not null && now < cached.ExpiresAt - options.TokenRefreshSkew) + { + return cached.Value; + } + + await _refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + cached = _cached; + now = _timeProvider.GetUtcNow(); + if (!forceRefresh && cached is not null && now < cached.ExpiresAt - options.TokenRefreshSkew) + { + return cached.Value; + } + + var fresh = await RequestTokenAsync(options, cancellationToken).ConfigureAwait(false); + _cached = fresh; + return fresh.Value; + } + finally + { + _refreshLock.Release(); + } + } + + private async Task RequestTokenAsync(CiscoOptions options, CancellationToken cancellationToken) + { + var client = _httpClientFactory.CreateClient(CiscoOptions.AuthHttpClientName); + client.Timeout = options.RequestTimeout; + + using var request = new HttpRequestMessage(HttpMethod.Post, options.TokenEndpoint); + request.Headers.Accept.Clear(); + request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + + var content = new FormUrlEncodedContent(new Dictionary + { + ["grant_type"] = "client_credentials", + ["client_id"] = options.ClientId, + ["client_secret"] = options.ClientSecret, + }); + + request.Content = content; + + using var response = await client.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var preview = await 
response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var message = $"Cisco OAuth token request failed with status {(int)response.StatusCode} {response.StatusCode}."; + _logger.LogError("Cisco openVuln token request failed: {Message}; response={Preview}", message, preview); + throw new HttpRequestException(message); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var payload = await JsonSerializer.DeserializeAsync(stream, SerializerOptions, cancellationToken).ConfigureAwait(false); + if (payload is null || string.IsNullOrWhiteSpace(payload.AccessToken)) + { + throw new InvalidOperationException("Cisco OAuth token response did not include an access token."); + } + + var expiresIn = payload.ExpiresIn > 0 ? TimeSpan.FromSeconds(payload.ExpiresIn) : TimeSpan.FromHours(1); + var now = _timeProvider.GetUtcNow(); + var expiresAt = now + expiresIn; + _logger.LogInformation("Cisco openVuln token issued; expires in {ExpiresIn}", expiresIn); + return new AccessToken(payload.AccessToken, expiresAt); + } + + public async Task RefreshAsync(CancellationToken cancellationToken) + => await GetTokenInternalAsync(forceRefresh: true, cancellationToken).ConfigureAwait(false); + + private void ThrowIfDisposed() + { + if (_disposed) + { + throw new ObjectDisposedException(nameof(CiscoAccessTokenProvider)); + } + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _refreshLock.Dispose(); + _disposed = true; + } + + private sealed record AccessToken(string Value, DateTimeOffset ExpiresAt); + + private sealed record TokenResponse( + [property: JsonPropertyName("access_token")] string AccessToken, + [property: JsonPropertyName("expires_in")] int ExpiresIn, + [property: JsonPropertyName("token_type")] string? TokenType); +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs new file mode 100644 index 00000000..6f99b138 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public sealed record CiscoAdvisoryDto( + string AdvisoryId, + string Title, + string? Summary, + string? Severity, + DateTimeOffset? Published, + DateTimeOffset? Updated, + string? PublicationUrl, + string? CsafUrl, + string? CvrfUrl, + double? CvssBaseScore, + IReadOnlyList Cves, + IReadOnlyList BugIds, + IReadOnlyList Products); + +public sealed record CiscoAffectedProductDto( + string Name, + string? ProductId, + string? Version, + IReadOnlyCollection Statuses); diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafClient.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafClient.cs new file mode 100644 index 00000000..623bbb9d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafClient.cs @@ -0,0 +1,64 @@ +using System; +using System.IO; +using System.Net.Http; +using System.Text; +using Microsoft.Extensions.Logging; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Vndr.Cisco.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public interface ICiscoCsafClient +{ + Task TryFetchAsync(string? 
url, CancellationToken cancellationToken); +} + +public class CiscoCsafClient : ICiscoCsafClient +{ + private static readonly string[] AcceptHeaders = { "application/json", "application/csaf+json", "application/vnd.cisco.csaf+json" }; + + private readonly SourceFetchService _fetchService; + private readonly ILogger _logger; + + public CiscoCsafClient(SourceFetchService fetchService, ILogger logger) + { + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public virtual async Task TryFetchAsync(string? url, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(url)) + { + return null; + } + + if (!Uri.TryCreate(url, UriKind.Absolute, out var uri)) + { + _logger.LogWarning("Cisco CSAF URL '{Url}' is not a valid absolute URI.", url); + return null; + } + + try + { + var request = new SourceFetchRequest(CiscoOptions.HttpClientName, VndrCiscoConnectorPlugin.SourceName, uri) + { + AcceptHeaders = AcceptHeaders, + }; + + var result = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess || result.Content is null) + { + _logger.LogWarning("Cisco CSAF download returned status {Status} for {Url}", result.StatusCode, url); + return null; + } + + return System.Text.Encoding.UTF8.GetString(result.Content); + } + catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or IOException or InvalidOperationException) + { + _logger.LogWarning(ex, "Cisco CSAF download failed for {Url}", url); + return null; + } + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafData.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafData.cs new file mode 100644 index 00000000..5c57caa5 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafData.cs @@ -0,0 +1,9 @@ +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +internal sealed record CiscoCsafData( + IReadOnlyDictionary Products, + IReadOnlyDictionary> ProductStatuses); + +internal sealed record CiscoCsafProduct(string ProductId, string Name); diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafParser.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafParser.cs new file mode 100644 index 00000000..9f0b5636 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCsafParser.cs @@ -0,0 +1,123 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text.Json; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +internal static class CiscoCsafParser +{ + public static CiscoCsafData Parse(string content) + { + if (string.IsNullOrWhiteSpace(content)) + { + return new CiscoCsafData( + Products: new Dictionary(0, StringComparer.OrdinalIgnoreCase), + ProductStatuses: new Dictionary>(0, StringComparer.OrdinalIgnoreCase)); + } + + using var document = JsonDocument.Parse(content); + var root = document.RootElement; + + var products = ParseProducts(root); + var statuses = ParseStatuses(root); + + return new CiscoCsafData(products, statuses); + } + + private static IReadOnlyDictionary ParseProducts(JsonElement root) + { + var dictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (!root.TryGetProperty("product_tree", out var productTree)) + { + return dictionary; + } + + if (productTree.TryGetProperty("full_product_names", out var fullProductNames) + && fullProductNames.ValueKind == 
JsonValueKind.Array) + { + foreach (var entry in fullProductNames.EnumerateArray()) + { + var productId = entry.TryGetProperty("product_id", out var idElement) && idElement.ValueKind == JsonValueKind.String + ? idElement.GetString() + : null; + + if (string.IsNullOrWhiteSpace(productId)) + { + continue; + } + + var name = entry.TryGetProperty("name", out var nameElement) && nameElement.ValueKind == JsonValueKind.String + ? nameElement.GetString() + : null; + + if (string.IsNullOrWhiteSpace(name)) + { + name = productId; + } + + dictionary[productId] = new CiscoCsafProduct(productId, name); + } + } + + return dictionary; + } + + private static IReadOnlyDictionary> ParseStatuses(JsonElement root) + { + var map = new Dictionary>(StringComparer.OrdinalIgnoreCase); + + if (!root.TryGetProperty("vulnerabilities", out var vulnerabilities) + || vulnerabilities.ValueKind != JsonValueKind.Array) + { + return map.ToDictionary( + static kvp => kvp.Key, + static kvp => (IReadOnlyCollection)kvp.Value.ToArray(), + StringComparer.OrdinalIgnoreCase); + } + + foreach (var vulnerability in vulnerabilities.EnumerateArray()) + { + if (!vulnerability.TryGetProperty("product_status", out var productStatus) + || productStatus.ValueKind != JsonValueKind.Object) + { + continue; + } + + foreach (var property in productStatus.EnumerateObject()) + { + var statusLabel = property.Name; + if (property.Value.ValueKind != JsonValueKind.Array) + { + continue; + } + + foreach (var productIdElement in property.Value.EnumerateArray()) + { + if (productIdElement.ValueKind != JsonValueKind.String) + { + continue; + } + + var productId = productIdElement.GetString(); + if (string.IsNullOrWhiteSpace(productId)) + { + continue; + } + + if (!map.TryGetValue(productId, out var set)) + { + set = new HashSet(StringComparer.OrdinalIgnoreCase); + map[productId] = set; + } + + set.Add(statusLabel); + } + } + } + + return map.ToDictionary( + static kvp => kvp.Key, + static kvp => (IReadOnlyCollection)kvp.Value.ToArray(), + StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCursor.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCursor.cs new file mode 100644 index 00000000..58801d17 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoCursor.cs @@ -0,0 +1,101 @@ +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +internal sealed record CiscoCursor( + DateTimeOffset? LastModified, + string? LastAdvisoryId, + IReadOnlyCollection PendingDocuments, + IReadOnlyCollection PendingMappings) +{ + private static readonly IReadOnlyCollection EmptyGuidCollection = Array.Empty(); + + public static CiscoCursor Empty { get; } = new(null, null, EmptyGuidCollection, EmptyGuidCollection); + + public BsonDocument ToBson() + { + var document = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())), + }; + + if (LastModified.HasValue) + { + document["lastModified"] = LastModified.Value.UtcDateTime; + } + + if (!string.IsNullOrWhiteSpace(LastAdvisoryId)) + { + document["lastAdvisoryId"] = LastAdvisoryId; + } + + return document; + } + + public static CiscoCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + DateTimeOffset? 
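+        // lastModified may be persisted either as a BSON date or as an ISO-8601 string; accept both when resuming.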
lastModified = null; + if (document.TryGetValue("lastModified", out var lastModifiedValue)) + { + lastModified = lastModifiedValue.BsonType switch + { + BsonType.DateTime => DateTime.SpecifyKind(lastModifiedValue.ToUniversalTime(), DateTimeKind.Utc), + BsonType.String when DateTimeOffset.TryParse(lastModifiedValue.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => null, + }; + } + + string? lastAdvisoryId = null; + if (document.TryGetValue("lastAdvisoryId", out var idValue) && idValue.BsonType == BsonType.String) + { + var value = idValue.AsString.Trim(); + if (value.Length > 0) + { + lastAdvisoryId = value; + } + } + + var pendingDocuments = ReadGuidArray(document, "pendingDocuments"); + var pendingMappings = ReadGuidArray(document, "pendingMappings"); + + return new CiscoCursor(lastModified, lastAdvisoryId, pendingDocuments, pendingMappings); + } + + public CiscoCursor WithCheckpoint(DateTimeOffset lastModified, string advisoryId) + => this with + { + LastModified = lastModified.ToUniversalTime(), + LastAdvisoryId = string.IsNullOrWhiteSpace(advisoryId) ? null : advisoryId.Trim(), + }; + + public CiscoCursor WithPendingDocuments(IEnumerable? documents) + => this with { PendingDocuments = documents?.Distinct().ToArray() ?? EmptyGuidCollection }; + + public CiscoCursor WithPendingMappings(IEnumerable? mappings) + => this with { PendingMappings = mappings?.Distinct().ToArray() ?? EmptyGuidCollection }; + + private static IReadOnlyCollection ReadGuidArray(BsonDocument document, string key) + { + if (!document.TryGetValue(key, out var value) || value is not BsonArray array) + { + return EmptyGuidCollection; + } + + var results = new List(array.Count); + foreach (var element in array) + { + if (Guid.TryParse(element.ToString(), out var guid)) + { + results.Add(guid); + } + } + + return results; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDiagnostics.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDiagnostics.cs new file mode 100644 index 00000000..7941f2e0 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDiagnostics.cs @@ -0,0 +1,82 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public sealed class CiscoDiagnostics : IDisposable +{ + public const string MeterName = "StellaOps.Feedser.Source.Vndr.Cisco"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + private readonly Counter _fetchDocuments; + private readonly Counter _fetchFailures; + private readonly Counter _fetchUnchanged; + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + private readonly Histogram _mapAffected; + + public CiscoDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + _fetchDocuments = _meter.CreateCounter( + name: "cisco.fetch.documents", + unit: "documents", + description: "Number of Cisco advisories fetched."); + _fetchFailures = _meter.CreateCounter( + name: "cisco.fetch.failures", + unit: "operations", + description: "Number of Cisco fetch failures."); + _fetchUnchanged = _meter.CreateCounter( + name: "cisco.fetch.unchanged", + unit: "documents", + description: "Number of Cisco advisories skipped because they were unchanged."); + _parseSuccess = _meter.CreateCounter( + name: "cisco.parse.success", + unit: "documents", + description: "Number of Cisco documents parsed successfully."); + _parseFailures = _meter.CreateCounter( + name: 
"cisco.parse.failures", + unit: "documents", + description: "Number of Cisco documents that failed to parse."); + _mapSuccess = _meter.CreateCounter( + name: "cisco.map.success", + unit: "documents", + description: "Number of Cisco advisories mapped successfully."); + _mapFailures = _meter.CreateCounter( + name: "cisco.map.failures", + unit: "documents", + description: "Number of Cisco advisories that failed to map to canonical form."); + _mapAffected = _meter.CreateHistogram( + name: "cisco.map.affected.packages", + unit: "packages", + description: "Distribution of affected package counts emitted per Cisco advisory."); + } + + public Meter Meter => _meter; + + public void FetchDocument() => _fetchDocuments.Add(1); + + public void FetchFailure() => _fetchFailures.Add(1); + + public void FetchUnchanged() => _fetchUnchanged.Add(1); + + public void ParseSuccess() => _parseSuccess.Add(1); + + public void ParseFailure() => _parseFailures.Add(1); + + public void MapSuccess() => _mapSuccess.Add(1); + + public void MapFailure() => _mapFailures.Add(1); + + public void MapAffected(int count) + { + if (count >= 0) + { + _mapAffected.Record(count); + } + } + + public void Dispose() => _meter.Dispose(); +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDtoFactory.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDtoFactory.cs new file mode 100644 index 00000000..61a4b17d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoDtoFactory.cs @@ -0,0 +1,190 @@ +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using StellaOps.Feedser.Models; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public class CiscoDtoFactory +{ + private readonly ICiscoCsafClient _csafClient; + private readonly ILogger _logger; + + public CiscoDtoFactory(ICiscoCsafClient csafClient, ILogger logger) + { + _csafClient = csafClient ?? throw new ArgumentNullException(nameof(csafClient)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task CreateAsync(CiscoRawAdvisory raw, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(raw); + + var advisoryId = raw.AdvisoryId?.Trim(); + if (string.IsNullOrWhiteSpace(advisoryId)) + { + throw new InvalidOperationException("Cisco advisory is missing advisoryId."); + } + + var title = string.IsNullOrWhiteSpace(raw.AdvisoryTitle) ? advisoryId : raw.AdvisoryTitle!.Trim(); + var severity = SeverityNormalization.Normalize(raw.Sir); + var published = ParseDate(raw.FirstPublished); + var updated = ParseDate(raw.LastUpdated); + + CiscoCsafData? csafData = null; + if (!string.IsNullOrWhiteSpace(raw.CsafUrl)) + { + var csafContent = await _csafClient.TryFetchAsync(raw.CsafUrl, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(csafContent)) + { + try + { + csafData = CiscoCsafParser.Parse(csafContent!); + } + catch (JsonException ex) + { + _logger.LogWarning(ex, "Cisco CSAF payload parsing failed for {AdvisoryId}", advisoryId); + } + } + } + + var products = BuildProducts(raw, csafData); + var cves = NormalizeList(raw.Cves); + var bugIds = NormalizeList(raw.BugIds); + var cvss = ParseDouble(raw.CvssBaseScore); + + return new CiscoAdvisoryDto( + AdvisoryId: advisoryId, + Title: title, + Summary: string.IsNullOrWhiteSpace(raw.Summary) ? 
null : raw.Summary!.Trim(), + Severity: severity, + Published: published, + Updated: updated, + PublicationUrl: NormalizeUrl(raw.PublicationUrl), + CsafUrl: NormalizeUrl(raw.CsafUrl), + CvrfUrl: NormalizeUrl(raw.CvrfUrl), + CvssBaseScore: cvss, + Cves: cves, + BugIds: bugIds, + Products: products); + } + + private static IReadOnlyList BuildProducts(CiscoRawAdvisory raw, CiscoCsafData? csafData) + { + var map = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (csafData is not null) + { + foreach (var entry in csafData.ProductStatuses) + { + var productId = entry.Key; + var name = csafData.Products.TryGetValue(productId, out var product) + ? product.Name + : productId; + + var statuses = NormalizeStatuses(entry.Value); + map[name] = new CiscoAffectedProductDto( + Name: name, + ProductId: productId, + Version: raw.Version?.Trim(), + Statuses: statuses); + } + } + + var rawProducts = NormalizeList(raw.ProductNames); + foreach (var productName in rawProducts) + { + if (map.ContainsKey(productName)) + { + continue; + } + + map[productName] = new CiscoAffectedProductDto( + Name: productName, + ProductId: null, + Version: raw.Version?.Trim(), + Statuses: new[] { AffectedPackageStatusCatalog.KnownAffected }); + } + + return map.Count == 0 + ? Array.Empty() + : map.Values + .OrderBy(static p => p.Name, StringComparer.OrdinalIgnoreCase) + .ThenBy(static p => p.ProductId, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyCollection NormalizeStatuses(IEnumerable statuses) + { + var set = new SortedSet(StringComparer.OrdinalIgnoreCase); + foreach (var status in statuses) + { + if (AffectedPackageStatusCatalog.TryNormalize(status, out var normalized)) + { + set.Add(normalized); + } + else if (!string.IsNullOrWhiteSpace(status)) + { + set.Add(status.Trim().ToLowerInvariant()); + } + } + + if (set.Count == 0) + { + set.Add(AffectedPackageStatusCatalog.KnownAffected); + } + + return set; + } + + private static IReadOnlyList NormalizeList(IEnumerable? items) + { + if (items is null) + { + return Array.Empty(); + } + + var set = new SortedSet(StringComparer.OrdinalIgnoreCase); + foreach (var item in items) + { + if (!string.IsNullOrWhiteSpace(item)) + { + set.Add(item.Trim()); + } + } + + return set.Count == 0 ? Array.Empty() : set.ToArray(); + } + + private static double? ParseDouble(string? value) + => double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed) + ? parsed + : null; + + private static DateTimeOffset? ParseDate(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) + { + return parsed.ToUniversalTime(); + } + + return null; + } + + private static string? NormalizeUrl(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return Uri.TryCreate(value.Trim(), UriKind.Absolute, out var uri) ? 
uri.ToString() : null; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoMapper.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoMapper.cs new file mode 100644 index 00000000..204b849f --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoMapper.cs @@ -0,0 +1,263 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Source.Common.Packages; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public static class CiscoMapper +{ + public static Advisory Map(CiscoAdvisoryDto dto, DocumentRecord document, DtoRecord dtoRecord) + { + ArgumentNullException.ThrowIfNull(dto); + ArgumentNullException.ThrowIfNull(document); + ArgumentNullException.ThrowIfNull(dtoRecord); + + var recordedAt = dtoRecord.ValidatedAt.ToUniversalTime(); + var fetchProvenance = new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + "document", + document.Uri, + document.FetchedAt.ToUniversalTime()); + + var mapProvenance = new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + "map", + dto.AdvisoryId, + recordedAt); + + var aliases = BuildAliases(dto); + var references = BuildReferences(dto, recordedAt); + var affected = BuildAffectedPackages(dto, recordedAt); + + return new Advisory( + advisoryKey: dto.AdvisoryId, + title: dto.Title, + summary: dto.Summary, + language: "en", + published: dto.Published, + modified: dto.Updated, + severity: dto.Severity, + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: affected, + cvssMetrics: Array.Empty(), + provenance: new[] { fetchProvenance, mapProvenance }); + } + + private static IReadOnlyList BuildAliases(CiscoAdvisoryDto dto) + { + var set = new HashSet(StringComparer.OrdinalIgnoreCase) + { + dto.AdvisoryId, + }; + + foreach (var cve in dto.Cves) + { + if (!string.IsNullOrWhiteSpace(cve)) + { + set.Add(cve.Trim()); + } + } + + foreach (var bugId in dto.BugIds) + { + if (!string.IsNullOrWhiteSpace(bugId)) + { + set.Add(bugId.Trim()); + } + } + + if (dto.PublicationUrl is not null) + { + set.Add(dto.PublicationUrl); + } + + return set.Count == 0 + ? Array.Empty() + : set.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase).ToArray(); + } + + private static IReadOnlyList BuildReferences(CiscoAdvisoryDto dto, DateTimeOffset recordedAt) + { + var list = new List(3); + AddReference(list, dto.PublicationUrl, "publication", recordedAt); + AddReference(list, dto.CvrfUrl, "cvrf", recordedAt); + AddReference(list, dto.CsafUrl, "csaf", recordedAt); + + return list.Count == 0 + ? Array.Empty() + : list.OrderBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase).ToArray(); + } + + private static void AddReference(ICollection references, string? 
url, string kind, DateTimeOffset recordedAt) + { + if (string.IsNullOrWhiteSpace(url)) + { + return; + } + + if (!Uri.TryCreate(url, UriKind.Absolute, out var uri)) + { + return; + } + + var provenance = new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + $"reference:{kind}", + uri.ToString(), + recordedAt); + + try + { + references.Add(new AdvisoryReference( + url: uri.ToString(), + kind: kind, + sourceTag: null, + summary: null, + provenance: provenance)); + } + catch (ArgumentException) + { + // ignore invalid URLs + } + } + + private static IReadOnlyList BuildAffectedPackages(CiscoAdvisoryDto dto, DateTimeOffset recordedAt) + { + if (dto.Products.Count == 0) + { + return Array.Empty(); + } + + var packages = new List(dto.Products.Count); + foreach (var product in dto.Products) + { + if (string.IsNullOrWhiteSpace(product.Name)) + { + continue; + } + + var range = BuildVersionRange(product, recordedAt); + var statuses = BuildStatuses(product, recordedAt); + var provenance = new[] + { + new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + "affected", + product.ProductId ?? product.Name, + recordedAt), + }; + + packages.Add(new AffectedPackage( + type: AffectedPackageTypes.Vendor, + identifier: product.Name, + platform: null, + versionRanges: range is null ? Array.Empty() : new[] { range }, + statuses: statuses, + provenance: provenance, + normalizedVersions: Array.Empty())); + } + + return packages.Count == 0 + ? Array.Empty() + : packages.OrderBy(static p => p.Identifier, StringComparer.OrdinalIgnoreCase).ToArray(); + } + + private static AffectedVersionRange? BuildVersionRange(CiscoAffectedProductDto product, DateTimeOffset recordedAt) + { + if (string.IsNullOrWhiteSpace(product.Version)) + { + return null; + } + + var version = product.Version.Trim(); + RangePrimitives? primitives = null; + string rangeKind = "vendor"; + string? rangeExpression = version; + + if (PackageCoordinateHelper.TryParseSemVer(version, out _, out var normalized)) + { + var semver = new SemVerPrimitive( + Introduced: null, + IntroducedInclusive: true, + Fixed: null, + FixedInclusive: false, + LastAffected: null, + LastAffectedInclusive: true, + ConstraintExpression: null, + ExactValue: normalized); + + primitives = new RangePrimitives(semver, null, null, BuildVendorExtensions(product)); + rangeKind = "semver"; + rangeExpression = normalized; + } + else + { + primitives = new RangePrimitives(null, null, null, BuildVendorExtensions(product, includeVersion: true)); + } + + var provenance = new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + "range", + product.ProductId ?? product.Name, + recordedAt); + + return new AffectedVersionRange( + rangeKind: rangeKind, + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: rangeExpression, + provenance: provenance, + primitives: primitives); + } + + private static IReadOnlyDictionary? BuildVendorExtensions(CiscoAffectedProductDto product, bool includeVersion = false) + { + var dictionary = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(product.ProductId)) + { + dictionary["cisco.productId"] = product.ProductId!; + } + + if (includeVersion && !string.IsNullOrWhiteSpace(product.Version)) + { + dictionary["cisco.version.raw"] = product.Version!; + } + + return dictionary.Count == 0 ? 
null : dictionary; + } + + private static IReadOnlyList BuildStatuses(CiscoAffectedProductDto product, DateTimeOffset recordedAt) + { + if (product.Statuses is null || product.Statuses.Count == 0) + { + return Array.Empty(); + } + + var list = new List(product.Statuses.Count); + foreach (var status in product.Statuses) + { + if (!AffectedPackageStatusCatalog.TryNormalize(status, out var normalized) + || string.IsNullOrWhiteSpace(normalized)) + { + continue; + } + + var provenance = new AdvisoryProvenance( + VndrCiscoConnectorPlugin.SourceName, + "status", + product.ProductId ?? product.Name, + recordedAt); + + list.Add(new AffectedPackageStatus(normalized, provenance)); + } + + return list.Count == 0 ? Array.Empty() : list; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs new file mode 100644 index 00000000..1d166753 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs @@ -0,0 +1,101 @@ +using System.IO; +using System.Net; +using System.Net.Http.Headers; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +internal sealed class CiscoOAuthMessageHandler : DelegatingHandler +{ + private readonly CiscoAccessTokenProvider _tokenProvider; + private readonly ILogger _logger; + + public CiscoOAuthMessageHandler( + CiscoAccessTokenProvider tokenProvider, + ILogger logger) + { + _tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + HttpRequestMessage? retryTemplate = null; + try + { + retryTemplate = await CloneRequestAsync(request, cancellationToken).ConfigureAwait(false); + } + catch (IOException) + { + // Unable to buffer content; retry will fail if needed. 
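+            // An HttpRequestMessage cannot be sent twice, so the retry path relies on the buffered
+            // clone prepared above; without it the 401 is surfaced to the caller after invalidating the token.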
+ retryTemplate = null; + } + + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await _tokenProvider.GetTokenAsync(cancellationToken).ConfigureAwait(false)); + var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode != HttpStatusCode.Unauthorized) + { + return response; + } + + response.Dispose(); + _logger.LogWarning("Cisco openVuln request returned 401 Unauthorized; refreshing access token."); + await _tokenProvider.RefreshAsync(cancellationToken).ConfigureAwait(false); + + if (retryTemplate is null) + { + _tokenProvider.Invalidate(); + throw new HttpRequestException("Cisco openVuln request returned 401 Unauthorized and could not be retried."); + } + + retryTemplate.Headers.Authorization = new AuthenticationHeaderValue("Bearer", await _tokenProvider.GetTokenAsync(cancellationToken).ConfigureAwait(false)); + + try + { + var retryResponse = await base.SendAsync(retryTemplate, cancellationToken).ConfigureAwait(false); + if (retryResponse.StatusCode == HttpStatusCode.Unauthorized) + { + _tokenProvider.Invalidate(); + } + + return retryResponse; + } + finally + { + retryTemplate.Dispose(); + } + } + + private static async Task CloneRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var clone = new HttpRequestMessage(request.Method, request.RequestUri) + { + Version = request.Version, + VersionPolicy = request.VersionPolicy, + }; + + foreach (var header in request.Headers) + { + clone.Headers.TryAddWithoutValidation(header.Key, header.Value); + } + + if (request.Content is not null) + { + using var memory = new MemoryStream(); + await request.Content.CopyToAsync(memory, cancellationToken).ConfigureAwait(false); + memory.Position = 0; + var buffer = memory.ToArray(); + var contentClone = new ByteArrayContent(buffer); + foreach (var header in request.Content.Headers) + { + contentClone.Headers.TryAddWithoutValidation(header.Key, header.Value); + } + + clone.Content = contentClone; + } + + return clone; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs new file mode 100644 index 00000000..8331bcf0 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs @@ -0,0 +1,196 @@ +using System.Diagnostics.CodeAnalysis; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Vndr.Cisco.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public sealed class CiscoOpenVulnClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + NumberHandling = JsonNumberHandling.AllowReadingFromString, + }; + + private readonly SourceFetchService _fetchService; + private readonly IOptionsMonitor _options; + private readonly ILogger _logger; + private readonly string _sourceName; + + public CiscoOpenVulnClient( + SourceFetchService fetchService, + IOptionsMonitor options, + ILogger logger, + string sourceName) + { + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + _sourceName = sourceName ?? throw new ArgumentNullException(nameof(sourceName)); + } + + internal async Task FetchAsync(DateOnly date, int pageIndex, CancellationToken cancellationToken) + { + var options = _options.CurrentValue; + var requestUri = options.BuildLastModifiedUri(date, pageIndex, options.PageSize); + var request = new SourceFetchRequest(CiscoOptions.HttpClientName, _sourceName, requestUri) + { + AcceptHeaders = new[] { "application/json" }, + TimeoutOverride = options.RequestTimeout, + }; + + var result = await _fetchService.FetchContentAsync(request, cancellationToken).ConfigureAwait(false); + if (!result.IsSuccess || result.Content is null) + { + _logger.LogDebug("Cisco openVuln request returned empty payload for {Uri} (status {Status})", requestUri, result.StatusCode); + return null; + } + + return CiscoAdvisoryPage.Parse(result.Content); + } +} + +internal sealed record CiscoAdvisoryPage( + IReadOnlyList Advisories, + CiscoPagination Pagination) +{ + public bool HasMore => Pagination.PageIndex < Pagination.TotalPages; + + public static CiscoAdvisoryPage Parse(byte[] content) + { + using var document = JsonDocument.Parse(content); + var root = document.RootElement; + var advisories = new List(); + + if (root.TryGetProperty("advisories", out var advisoriesElement) && advisoriesElement.ValueKind == JsonValueKind.Array) + { + foreach (var advisory in advisoriesElement.EnumerateArray()) + { + if (!TryCreateItem(advisory, out var item)) + { + continue; + } + + advisories.Add(item); + } + } + + var pagination = CiscoPagination.FromJson(root.TryGetProperty("pagination", out var paginationElement) ? paginationElement : default); + return new CiscoAdvisoryPage(advisories, pagination); + } + + private static bool TryCreateItem(JsonElement advisory, [NotNullWhen(true)] out CiscoAdvisoryItem? item) + { + var rawJson = advisory.GetRawText(); + var advisoryId = GetString(advisory, "advisoryId"); + if (string.IsNullOrWhiteSpace(advisoryId)) + { + item = null; + return false; + } + + var lastUpdated = ParseDate(GetString(advisory, "lastUpdated")); + var firstPublished = ParseDate(GetString(advisory, "firstPublished")); + var severity = GetString(advisory, "sir"); + var publicationUrl = GetString(advisory, "publicationUrl"); + var csafUrl = GetString(advisory, "csafUrl"); + var cvrfUrl = GetString(advisory, "cvrfUrl"); + var cvss = GetString(advisory, "cvssBaseScore"); + + var cves = ReadStringArray(advisory, "cves"); + var bugIds = ReadStringArray(advisory, "bugIDs"); + var productNames = ReadStringArray(advisory, "productNames"); + + item = new CiscoAdvisoryItem( + advisoryId, + lastUpdated, + firstPublished, + severity, + publicationUrl, + csafUrl, + cvrfUrl, + cvss, + cves, + bugIds, + productNames, + rawJson); + return true; + } + + private static string? GetString(JsonElement element, string propertyName) + => element.TryGetProperty(propertyName, out var value) && value.ValueKind == JsonValueKind.String + ? value.GetString() + : null; + + private static DateTimeOffset? ParseDate(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (DateTimeOffset.TryParse(value, out var parsed)) + { + return parsed.ToUniversalTime(); + } + + return null; + } + + private static IReadOnlyList ReadStringArray(JsonElement element, string property) + { + if (!element.TryGetProperty(property, out var value) || value.ValueKind != JsonValueKind.Array) + { + return Array.Empty(); + } + + var results = new List(); + foreach (var child in value.EnumerateArray()) + { + if (child.ValueKind == JsonValueKind.String) + { + var text = child.GetString(); + if (!string.IsNullOrWhiteSpace(text)) + { + results.Add(text.Trim()); + } + } + } + + return results; + } +} + +internal sealed record CiscoAdvisoryItem( + string AdvisoryId, + DateTimeOffset? LastUpdated, + DateTimeOffset? FirstPublished, + string? Severity, + string? PublicationUrl, + string? CsafUrl, + string? CvrfUrl, + string? CvssBaseScore, + IReadOnlyList Cves, + IReadOnlyList BugIds, + IReadOnlyList ProductNames, + string RawJson) +{ + public byte[] GetRawBytes() => Encoding.UTF8.GetBytes(RawJson); +} + +internal sealed record CiscoPagination(int PageIndex, int PageSize, int TotalPages, int TotalRecords) +{ + public static CiscoPagination FromJson(JsonElement element) + { + var pageIndex = element.TryGetProperty("pageIndex", out var index) && index.TryGetInt32(out var parsedIndex) ? parsedIndex : 1; + var pageSize = element.TryGetProperty("pageSize", out var size) && size.TryGetInt32(out var parsedSize) ? parsedSize : 0; + var totalPages = element.TryGetProperty("totalPages", out var pages) && pages.TryGetInt32(out var parsedPages) ? parsedPages : pageIndex; + var totalRecords = element.TryGetProperty("totalRecords", out var records) && records.TryGetInt32(out var parsedRecords) ? parsedRecords : 0; + return new CiscoPagination(pageIndex, pageSize, totalPages, totalRecords); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoRawAdvisory.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoRawAdvisory.cs new file mode 100644 index 00000000..37a745d6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Internal/CiscoRawAdvisory.cs @@ -0,0 +1,64 @@ +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Vndr.Cisco.Internal; + +public class CiscoRawAdvisory +{ + [JsonPropertyName("advisoryId")] + public string? AdvisoryId { get; set; } + + [JsonPropertyName("advisoryTitle")] + public string? AdvisoryTitle { get; set; } + + [JsonPropertyName("publicationUrl")] + public string? PublicationUrl { get; set; } + + [JsonPropertyName("cvrfUrl")] + public string? CvrfUrl { get; set; } + + [JsonPropertyName("csafUrl")] + public string? CsafUrl { get; set; } + + [JsonPropertyName("summary")] + public string? Summary { get; set; } + + [JsonPropertyName("sir")] + public string? Sir { get; set; } + + [JsonPropertyName("firstPublished")] + public string? FirstPublished { get; set; } + + [JsonPropertyName("lastUpdated")] + public string? LastUpdated { get; set; } + + [JsonPropertyName("productNames")] + public List? ProductNames { get; set; } + + [JsonPropertyName("version")] + public string? Version { get; set; } + + [JsonPropertyName("iosRelease")] + public string? IosRelease { get; set; } + + [JsonPropertyName("cves")] + public List? Cves { get; set; } + + [JsonPropertyName("bugIDs")] + public List? BugIds { get; set; } + + [JsonPropertyName("cvssBaseScore")] + public string? 
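+    // openVuln serialises CVSS scores as strings; CiscoDtoFactory parses them into doubles downstream.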
CvssBaseScore { get; set; } + + [JsonPropertyName("cvssTemporalScore")] + public string? CvssTemporalScore { get; set; } + + [JsonPropertyName("cvssEnvironmentalScore")] + public string? CvssEnvironmentalScore { get; set; } + + [JsonPropertyName("cvssBaseScoreVersion2")] + public string? CvssBaseScoreV2 { get; set; } + + [JsonPropertyName("status")] + public string? Status { get; set; } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/Jobs.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/Jobs.cs new file mode 100644 index 00000000..ddaa298e --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/Jobs.cs @@ -0,0 +1,46 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Vndr.Cisco; + +internal static class CiscoJobKinds +{ + public const string Fetch = "source:vndr-cisco:fetch"; + public const string Parse = "source:vndr-cisco:parse"; + public const string Map = "source:vndr-cisco:map"; +} + +internal sealed class CiscoFetchJob : IJob +{ + private readonly CiscoConnector _connector; + + public CiscoFetchJob(CiscoConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} + +internal sealed class CiscoParseJob : IJob +{ + private readonly CiscoConnector _connector; + + public CiscoParseJob(CiscoConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.ParseAsync(context.Services, cancellationToken); +} + +internal sealed class CiscoMapJob : IJob +{ + private readonly CiscoConnector _connector; + + public CiscoMapJob(CiscoConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.MapAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/StellaOps.Feedser.Source.Vndr.Cisco.csproj b/src/StellaOps.Feedser.Source.Vndr.Cisco/StellaOps.Feedser.Source.Vndr.Cisco.csproj index f7f2c154..effa7961 100644 --- a/src/StellaOps.Feedser.Source.Vndr.Cisco/StellaOps.Feedser.Source.Vndr.Cisco.csproj +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/StellaOps.Feedser.Source.Vndr.Cisco.csproj @@ -6,11 +6,11 @@ enable - - - - - - - - + + + + + + + + diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md b/src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md index 38257590..3df21054 100644 --- a/src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/TASKS.md @@ -2,10 +2,10 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-CISCO-02-001 Confirm Cisco PSIRT data source|BE-Conn-Cisco|Research|**DONE (2025-10-11)** – Selected openVuln REST API (`https://apix.cisco.com/security/advisories/v2/…`) as primary (structured JSON, CSAF/CVRF links) with RSS as fallback. 
Documented OAuth2 client-credentials flow (`cloudsso.cisco.com/as/token.oauth2`), baseline quotas (5 req/s, 30 req/min, 5 000 req/day), and pagination contract (`pageIndex`, `pageSize≤100`) in `docs/feedser-connector-research-20251011.md`.| -|FEEDCONN-CISCO-02-002 Fetch pipeline & state persistence|BE-Conn-Cisco|Source.Common, Storage.Mongo|**TODO** – Implement fetch job using shared OAuth token cache, honor `Retry-After` on 429, and persist raw advisory payloads + CSAF links. Cursor strategy: `lastUpdated` + advisory ID, with incremental filters (`/lastmodified/{YYYY-MM-DD}` or `/year/{YYYY}` + paging).| -|FEEDCONN-CISCO-02-003 Parser & DTO implementation|BE-Conn-Cisco|Source.Common|**TODO** – Map openVuln JSON fields (`advisoryId`, `advisoryTitle`, `cves`, `bugIDs`, `sir`, `productNames`, `version`, `cvssBaseScore`, `publicationUrl`, `cvrfUrl`, `csafUrl`). Normalize severity (SIR→Feedser severity), expand product list into affected packages, ingest CSAF where present to derive range primitives.| -|FEEDCONN-CISCO-02-004 Canonical mapping & range primitives|BE-Conn-Cisco|Models|**TODO** – Map advisories into canonical records with aliases, references, range primitives (SemVer/IOS/ASA versions). Sync scheme decisions and deadlines via `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: baseline array `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"cisco:psirt:advisory-id"}]`; if IOS-specific comparer is required, capture sample payload and raise Models issue before introducing a new `scheme`.| -|FEEDCONN-CISCO-02-005 Deterministic fixtures & tests|QA|Testing|**TODO** – Add fetch/parse/map regression tests; support `UPDATE_CISCO_FIXTURES=1`.| -|FEEDCONN-CISCO-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics, document connector usage, update backlog when ready.| +|FEEDCONN-CISCO-02-002 Fetch pipeline & state persistence|BE-Conn-Cisco|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – Fetch job now streams openVuln pages with OAuth bearer handler, honours 429 `Retry-After`, persists per-advisory JSON + metadata into GridFS, and updates cursor (`lastModified`, advisory ID, pending docs).| +|FEEDCONN-CISCO-02-003 Parser & DTO implementation|BE-Conn-Cisco|Source.Common|**DONE (2025-10-14)** – DTO factory normalizes SIR, folds CSAF product statuses, and persists `cisco.dto.v1` payloads (see `CiscoDtoFactory`).| +|FEEDCONN-CISCO-02-004 Canonical mapping & range primitives|BE-Conn-Cisco|Models|**DONE (2025-10-14)** – `CiscoMapper` emits canonical advisories with vendor + SemVer primitives, provenance, and status tags.| +|FEEDCONN-CISCO-02-005 Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-14)** – Added unit tests (`StellaOps.Feedser.Source.Vndr.Cisco.Tests`) exercising DTO/mapper pipelines; `dotnet test` validated.| +|FEEDCONN-CISCO-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** – Cisco diagnostics counters exposed and ops runbook updated with telemetry guidance (`docs/ops/feedser-cisco-operations.md`).| |FEEDCONN-CISCO-02-007 API selection decision memo|BE-Conn-Cisco|Research|**DONE (2025-10-11)** – Drafted decision matrix: openVuln (structured/delta filters, OAuth throttle) vs RSS (delayed/minimal metadata). 
Pending OAuth onboarding (`FEEDCONN-CISCO-02-008`) before final recommendation circulated.| -|FEEDCONN-CISCO-02-008 OAuth client provisioning|Ops, BE-Conn-Cisco|Ops|**TODO** – Register openVuln application, capture client credential rotation steps, throttle limits, and Offline Kit secret distribution guidance.| +|FEEDCONN-CISCO-02-008 OAuth client provisioning|Ops, BE-Conn-Cisco|Ops|**DONE (2025-10-14)** – `docs/ops/feedser-cisco-operations.md` documents OAuth provisioning/rotation, quotas, and Offline Kit distribution guidance.| diff --git a/src/StellaOps.Feedser.Source.Vndr.Cisco/VndrCiscoConnectorPlugin.cs b/src/StellaOps.Feedser.Source.Vndr.Cisco/VndrCiscoConnectorPlugin.cs new file mode 100644 index 00000000..204da255 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Cisco/VndrCiscoConnectorPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Vndr.Cisco; + +public sealed class VndrCiscoConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "vndr-cisco"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) + => services.GetService() is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return services.GetRequiredService(); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-detail.json b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-detail.json new file mode 100644 index 00000000..b2463f28 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-detail.json @@ -0,0 +1,44 @@ +{ + "id": "7a760e58-bd5f-4f37-8b87-1b61f2deb001", + "vulnerabilityId": "ADV123456", + "cveNumbers": [ + "CVE-2025-0001" + ], + "title": "Windows Kernel Elevation of Privilege Vulnerability", + "description": "An elevation of privilege vulnerability exists in the Windows kernel.", + "releaseDate": "2025-10-10T10:00:00Z", + "lastModifiedDate": "2025-10-14T11:00:00Z", + "severity": "Critical", + "threats": [ + { + "type": "Impact", + "description": "Elevation of Privilege", + "severity": "Important" + } + ], + "remediations": [ + { + "id": "1", + "type": "Security Update", + "description": "Install KB5031234 to address this vulnerability.", + "url": "https://support.microsoft.com/help/5031234", + "kbNumber": "KB5031234" + } + ], + "affectedProducts": [ + { + "productId": "Windows11-23H2-x64", + "productName": "Windows 11 Version 23H2 for x64-based Systems", + "platform": "Windows", + "architecture": "x64", + "buildNumber": "22631.3520", + "cpe": "cpe:/o:microsoft:windows_11:23H2" + } + ], + "cvssV3": { + "baseScore": 8.1, + "vectorString": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H" + }, + "releaseNoteUrl": "https://msrc.microsoft.com/update-guide/vulnerability/ADV123456", + "cvrfUrl": "https://download.microsoft.com/msrc/2025/ADV123456.cvrf.zip" +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-summary.json b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-summary.json new file mode 100644 index 00000000..a49fefdf --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/Fixtures/msrc-summary.json @@ -0,0 +1,17 @@ +{ + "value": [ + { + "id": "7a760e58-bd5f-4f37-8b87-1b61f2deb001", + "vulnerabilityId": "ADV123456", + "cveNumbers": [ + "CVE-2025-0001" + ], + "title": "Windows Kernel Elevation of Privilege Vulnerability", + "description": "An elevation of privilege vulnerability 
exists in the Windows kernel.", + "releaseDate": "2025-10-10T10:00:00Z", + "lastModifiedDate": "2025-10-14T11:00:00Z", + "severity": "Critical", + "cvrfUrl": "https://download.microsoft.com/msrc/2025/ADV123456.cvrf.zip" + } + ] +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/MsrcConnectorTests.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/MsrcConnectorTests.cs new file mode 100644 index 00000000..134d0261 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/MsrcConnectorTests.cs @@ -0,0 +1,200 @@ +using System; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Testing; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; +using StellaOps.Feedser.Source.Vndr.Msrc.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Feedser.Testing; +using Xunit; +using StellaOps.Feedser.Source.Common.Http; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Tests; + +[Collection("mongo-fixture")] +public sealed class MsrcConnectorTests : IAsyncLifetime +{ + private static readonly Uri TokenUri = new("https://login.microsoftonline.com/11111111-1111-1111-1111-111111111111/oauth2/v2.0/token"); + private static readonly Uri SummaryUri = new("https://api.msrc.microsoft.com/sug/v2.0/vulnerabilities"); + private static readonly Uri DetailUri = new("https://api.msrc.microsoft.com/sug/v2.0/vulnerability/ADV123456"); + + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler; + + public MsrcConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + _handler = new CannedHttpMessageHandler(); + } + + [Fact] + public async Task FetchParseMap_ProducesCanonicalAdvisory() + { + await using var provider = await BuildServiceProviderAsync(); + SeedResponses(); + + var connector = provider.GetRequiredService(); + await connector.FetchAsync(provider, CancellationToken.None); + await connector.ParseAsync(provider, CancellationToken.None); + await connector.MapAsync(provider, CancellationToken.None); + + var advisoryStore = provider.GetRequiredService(); + var advisories = await advisoryStore.GetRecentAsync(5, CancellationToken.None); + advisories.Should().HaveCount(1); + + var advisory = advisories[0]; + advisory.AdvisoryKey.Should().Be("ADV123456"); + advisory.Severity.Should().Be("critical"); + advisory.Aliases.Should().Contain("CVE-2025-0001"); + advisory.Aliases.Should().Contain("KB5031234"); + advisory.References.Should().Contain(reference => reference.Url == "https://msrc.microsoft.com/update-guide/vulnerability/ADV123456"); + advisory.References.Should().Contain(reference => reference.Url == "https://download.microsoft.com/msrc/2025/ADV123456.cvrf.zip"); + advisory.AffectedPackages.Should().HaveCount(1); + advisory.AffectedPackages[0].NormalizedVersions.Should().Contain(rule => rule.Scheme == "msrc.build" && rule.Value == "22631.3520"); + advisory.CvssMetrics.Should().Contain(metric => metric.BaseScore == 8.1); + + var 
stateRepository = provider.GetRequiredService(); + var state = await stateRepository.TryGetAsync(MsrcConnectorPlugin.SourceName, CancellationToken.None); + state.Should().NotBeNull(); + state!.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue(); + pendingDocs!.AsBsonArray.Should().BeEmpty(); + + var documentStore = provider.GetRequiredService(); + var cvrfDocument = await documentStore.FindBySourceAndUriAsync(MsrcConnectorPlugin.SourceName, "https://download.microsoft.com/msrc/2025/ADV123456.cvrf.zip", CancellationToken.None); + cvrfDocument.Should().NotBeNull(); + cvrfDocument!.Status.Should().Be(DocumentStatuses.Mapped); + } + + private async Task BuildServiceProviderAsync() + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + services.AddSingleton(TimeProvider.System); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSourceCommon(); + services.AddMsrcConnector(options => + { + options.TenantId = "11111111-1111-1111-1111-111111111111"; + options.ClientId = "client-id"; + options.ClientSecret = "secret"; + options.InitialLastModified = new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero); + options.RequestDelay = TimeSpan.Zero; + options.MaxAdvisoriesPerFetch = 10; + options.CursorOverlap = TimeSpan.FromMinutes(1); + options.DownloadCvrf = true; + }); + + services.Configure(MsrcOptions.HttpClientName, builderOptions => + { + builderOptions.HttpMessageHandlerBuilderActions.Add(builder => + { + builder.PrimaryHandler = _handler; + }); + }); + + services.Configure(MsrcOptions.TokenClientName, builderOptions => + { + builderOptions.HttpMessageHandlerBuilderActions.Add(builder => + { + builder.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void SeedResponses() + { + var summaryJson = ReadFixture("msrc-summary.json"); + var detailJson = ReadFixture("msrc-detail.json"); + var tokenJson = """{"token_type":"Bearer","expires_in":3600,"access_token":"fake-token"}"""; + var cvrfBytes = Encoding.UTF8.GetBytes("PK\x03\x04FAKECVRF"); + + _handler.SetFallback(request => + { + if (request.RequestUri is null) + { + return new HttpResponseMessage(HttpStatusCode.BadRequest); + } + + if (request.RequestUri.Host.Contains("login.microsoftonline.com", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(tokenJson, Encoding.UTF8, "application/json"), + }; + } + + if (request.RequestUri.AbsolutePath.EndsWith("/vulnerabilities", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(summaryJson, Encoding.UTF8, "application/json"), + }; + } + + if (request.RequestUri.AbsolutePath.Contains("/vulnerability/ADV123456", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(detailJson, Encoding.UTF8, "application/json"), + }; + } + + if 
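+            // Canned CVRF artefact for the DownloadCvrf = true path configured in BuildServiceProviderAsync.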
(request.RequestUri.Host.Contains("download.microsoft.com", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(cvrfBytes) + { + Headers = + { + ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/zip"), + }, + }, + }; + } + + return new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"No canned response for {request.RequestUri}", Encoding.UTF8), + }; + }); + } + + private static string ReadFixture(string fileName) + => System.IO.File.ReadAllText(System.IO.Path.Combine(AppContext.BaseDirectory, "Fixtures", fileName)); + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() => Task.CompletedTask; +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/StellaOps.Feedser.Source.Vndr.Msrc.Tests.csproj b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/StellaOps.Feedser.Source.Vndr.Msrc.Tests.csproj new file mode 100644 index 00000000..f3e0a677 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc.Tests/StellaOps.Feedser.Source.Vndr.Msrc.Tests.csproj @@ -0,0 +1,24 @@ + + + net10.0 + enable + enable + + + + + + + + + + + + + + + + PreserveNewest + + + diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Class1.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Class1.cs deleted file mode 100644 index efee818a..00000000 --- a/src/StellaOps.Feedser.Source.Vndr.Msrc/Class1.cs +++ /dev/null @@ -1,29 +0,0 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Plugin; - -namespace StellaOps.Feedser.Source.Vndr.Msrc; - -public sealed class VndrMsrcConnectorPlugin : IConnectorPlugin -{ - public string Name => "vndr-msrc"; - - public bool IsAvailable(IServiceProvider services) => true; - - public IFeedConnector Create(IServiceProvider services) => new StubConnector(Name); - - private sealed class StubConnector : IFeedConnector - { - public StubConnector(string sourceName) => SourceName = sourceName; - - public string SourceName { get; } - - public Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask; - } -} - diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Configuration/MsrcOptions.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Configuration/MsrcOptions.cs new file mode 100644 index 00000000..3e23fdf6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Configuration/MsrcOptions.cs @@ -0,0 +1,132 @@ +using System.Globalization; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Configuration; + +public sealed class MsrcOptions +{ + public const string HttpClientName = "feedser.source.vndr.msrc"; + public const string TokenClientName = "feedser.source.vndr.msrc.token"; + + public Uri BaseUri { get; set; } = new("https://api.msrc.microsoft.com/sug/v2.0/", UriKind.Absolute); + + public string Locale { get; set; } = "en-US"; + + public string ApiVersion { get; set; } = "2024-08-01"; + + /// + /// Azure AD tenant identifier used for client credential flow. + /// + public string TenantId { get; set; } = string.Empty; + + /// + /// Azure AD application (client) identifier. + /// + public string ClientId { get; set; } = string.Empty; + + /// + /// Azure AD client secret used for token acquisition. 
+ /// + public string ClientSecret { get; set; } = string.Empty; + + /// + /// Scope requested during client-credential token acquisition. + /// + public string Scope { get; set; } = "api://api.msrc.microsoft.com/.default"; + + /// + /// Maximum advisories to fetch per cycle. + /// + public int MaxAdvisoriesPerFetch { get; set; } = 200; + + /// + /// Page size used when iterating the MSRC API. + /// + public int PageSize { get; set; } = 100; + + /// + /// Overlap window added when resuming from the last modified cursor. + /// + public TimeSpan CursorOverlap { get; set; } = TimeSpan.FromMinutes(10); + + /// + /// When enabled the connector downloads the CVRF artefact referenced by each advisory. + /// + public bool DownloadCvrf { get; set; } = false; + + public TimeSpan RequestDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Optional lower bound for the initial sync if the cursor is empty. + /// + public DateTimeOffset? InitialLastModified { get; set; } = DateTimeOffset.UtcNow.AddDays(-30); + + public void Validate() + { + if (BaseUri is null || !BaseUri.IsAbsoluteUri) + { + throw new InvalidOperationException("MSRC base URI must be absolute."); + } + + if (string.IsNullOrWhiteSpace(Locale)) + { + throw new InvalidOperationException("Locale must be provided."); + } + + if (!string.IsNullOrWhiteSpace(Locale) && !CultureInfo.GetCultures(CultureTypes.AllCultures).Any(c => string.Equals(c.Name, Locale, StringComparison.OrdinalIgnoreCase))) + { + throw new InvalidOperationException($"Locale '{Locale}' is not recognised."); + } + + if (string.IsNullOrWhiteSpace(ApiVersion)) + { + throw new InvalidOperationException("API version must be provided."); + } + + if (!Guid.TryParse(TenantId, out _)) + { + throw new InvalidOperationException("TenantId must be a valid GUID."); + } + + if (string.IsNullOrWhiteSpace(ClientId)) + { + throw new InvalidOperationException("ClientId must be provided."); + } + + if (string.IsNullOrWhiteSpace(ClientSecret)) + { + throw new InvalidOperationException("ClientSecret must be provided."); + } + + if (string.IsNullOrWhiteSpace(Scope)) + { + throw new InvalidOperationException("Scope must be provided."); + } + + if (MaxAdvisoriesPerFetch <= 0) + { + throw new InvalidOperationException($"{nameof(MaxAdvisoriesPerFetch)} must be greater than zero."); + } + + if (PageSize <= 0 || PageSize > 500) + { + throw new InvalidOperationException($"{nameof(PageSize)} must be between 1 and 500."); + } + + if (CursorOverlap < TimeSpan.Zero || CursorOverlap > TimeSpan.FromHours(6)) + { + throw new InvalidOperationException($"{nameof(CursorOverlap)} must be within 0-6 hours."); + } + + if (RequestDelay < TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(RequestDelay)} cannot be negative."); + } + + if (FailureBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException($"{nameof(FailureBackoff)} must be positive."); + } + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs new file mode 100644 index 00000000..a5b0b61b --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed record MsrcAdvisoryDto +{ + public string AdvisoryId { get; init; } = string.Empty; + + public string Title { get; init; } = 
string.Empty; + + public string? Description { get; init; } + + public string? Severity { get; init; } + + public DateTimeOffset? ReleaseDate { get; init; } + + public DateTimeOffset? LastModifiedDate { get; init; } + + public IReadOnlyList CveIds { get; init; } = Array.Empty(); + + public IReadOnlyList KbIds { get; init; } = Array.Empty(); + + public IReadOnlyList Threats { get; init; } = Array.Empty(); + + public IReadOnlyList Remediations { get; init; } = Array.Empty(); + + public IReadOnlyList Products { get; init; } = Array.Empty(); + + public double? CvssBaseScore { get; init; } + + public string? CvssVector { get; init; } + + public string? ReleaseNoteUrl { get; init; } + + public string? CvrfUrl { get; init; } +} + +public sealed record MsrcAdvisoryThreat(string Type, string? Description, string? Severity); + +public sealed record MsrcAdvisoryRemediation(string Type, string? Description, string? Url, string? Kb); + +public sealed record MsrcAdvisoryProduct( + string Identifier, + string? ProductName, + string? Platform, + string? Architecture, + string? BuildNumber, + string? Cpe); diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcApiClient.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcApiClient.cs new file mode 100644 index 00000000..9a4fa7e6 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcApiClient.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed class MsrcApiClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + WriteIndented = false, + }; + + private readonly IHttpClientFactory _httpClientFactory; + private readonly IMsrcTokenProvider _tokenProvider; + private readonly MsrcOptions _options; + private readonly ILogger _logger; + + public MsrcApiClient( + IHttpClientFactory httpClientFactory, + IMsrcTokenProvider tokenProvider, + IOptions options, + ILogger logger) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task> FetchSummariesAsync(DateTimeOffset fromInclusive, DateTimeOffset toExclusive, CancellationToken cancellationToken) + { + var client = await CreateAuthenticatedClientAsync(cancellationToken).ConfigureAwait(false); + + var results = new List(); + var requestUri = BuildSummaryUri(fromInclusive, toExclusive); + + while (requestUri is not null) + { + using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + using var response = await client.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var preview = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new HttpRequestException($"MSRC summary fetch failed with {(int)response.StatusCode}. 
Body: {preview}"); + } + + var payload = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false) + ?? new MsrcSummaryResponse(); + + results.AddRange(payload.Value); + + if (string.IsNullOrWhiteSpace(payload.NextLink)) + { + break; + } + + requestUri = new Uri(payload.NextLink, UriKind.Absolute); + } + + return results; + } + + public Uri BuildDetailUri(string vulnerabilityId) + { + var uri = CreateDetailUriInternal(vulnerabilityId); + return uri; + } + + public async Task FetchDetailAsync(string vulnerabilityId, CancellationToken cancellationToken) + { + var client = await CreateAuthenticatedClientAsync(cancellationToken).ConfigureAwait(false); + var uri = CreateDetailUriInternal(vulnerabilityId); + + using var request = new HttpRequestMessage(HttpMethod.Get, uri); + using var response = await client.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var preview = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new HttpRequestException($"MSRC detail fetch failed for {vulnerabilityId} with {(int)response.StatusCode}. Body: {preview}"); + } + + return await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + } + + private async Task CreateAuthenticatedClientAsync(CancellationToken cancellationToken) + { + var token = await _tokenProvider.GetAccessTokenAsync(cancellationToken).ConfigureAwait(false); + var client = _httpClientFactory.CreateClient(MsrcOptions.HttpClientName); + client.DefaultRequestHeaders.Remove("Authorization"); + client.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); + client.DefaultRequestHeaders.Remove("Accept"); + client.DefaultRequestHeaders.Add("Accept", "application/json"); + client.DefaultRequestHeaders.Remove("api-version"); + client.DefaultRequestHeaders.Add("api-version", _options.ApiVersion); + client.DefaultRequestHeaders.Remove("Accept-Language"); + client.DefaultRequestHeaders.Add("Accept-Language", _options.Locale); + return client; + } + + private Uri BuildSummaryUri(DateTimeOffset fromInclusive, DateTimeOffset toExclusive) + { + var builder = new StringBuilder(); + builder.Append(_options.BaseUri.ToString().TrimEnd('/')); + builder.Append("/vulnerabilities?"); + builder.Append("$top=").Append(_options.PageSize); + builder.Append("&lastModifiedStartDateTime=").Append(Uri.EscapeDataString(fromInclusive.ToUniversalTime().ToString("O"))); + builder.Append("&lastModifiedEndDateTime=").Append(Uri.EscapeDataString(toExclusive.ToUniversalTime().ToString("O"))); + builder.Append("&$orderby=lastModifiedDate"); + builder.Append("&locale=").Append(Uri.EscapeDataString(_options.Locale)); + builder.Append("&api-version=").Append(Uri.EscapeDataString(_options.ApiVersion)); + + return new Uri(builder.ToString(), UriKind.Absolute); + } + + private Uri CreateDetailUriInternal(string vulnerabilityId) + { + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + throw new ArgumentException("Vulnerability identifier must be provided.", nameof(vulnerabilityId)); + } + + var baseUri = _options.BaseUri.ToString().TrimEnd('/'); + var path = $"{baseUri}/vulnerability/{Uri.EscapeDataString(vulnerabilityId)}?api-version={Uri.EscapeDataString(_options.ApiVersion)}&locale={Uri.EscapeDataString(_options.Locale)}"; + return new Uri(path, UriKind.Absolute); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcCursor.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcCursor.cs new 
file mode 100644 index 00000000..cc1e0763 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcCursor.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +internal sealed record MsrcCursor( + IReadOnlyCollection PendingDocuments, + IReadOnlyCollection PendingMappings, + DateTimeOffset? LastModifiedCursor) +{ + private static readonly IReadOnlyCollection EmptyGuidSet = Array.Empty(); + + public static MsrcCursor Empty { get; } = new(EmptyGuidSet, EmptyGuidSet, null); + + public MsrcCursor WithPendingDocuments(IEnumerable documents) + => this with { PendingDocuments = Distinct(documents) }; + + public MsrcCursor WithPendingMappings(IEnumerable mappings) + => this with { PendingMappings = Distinct(mappings) }; + + public MsrcCursor WithLastModifiedCursor(DateTimeOffset? timestamp) + => this with { LastModifiedCursor = timestamp }; + + public BsonDocument ToBsonDocument() + { + var document = new BsonDocument + { + ["pendingDocuments"] = new BsonArray(PendingDocuments.Select(id => id.ToString())), + ["pendingMappings"] = new BsonArray(PendingMappings.Select(id => id.ToString())), + }; + + if (LastModifiedCursor.HasValue) + { + document["lastModifiedCursor"] = LastModifiedCursor.Value.UtcDateTime; + } + + return document; + } + + public static MsrcCursor FromBson(BsonDocument? document) + { + if (document is null || document.ElementCount == 0) + { + return Empty; + } + + var pendingDocuments = ReadGuidArray(document, "pendingDocuments"); + var pendingMappings = ReadGuidArray(document, "pendingMappings"); + var lastModified = document.TryGetValue("lastModifiedCursor", out var value) + ? ParseDate(value) + : null; + + return new MsrcCursor(pendingDocuments, pendingMappings, lastModified); + } + + private static IReadOnlyCollection Distinct(IEnumerable? values) + => values?.Distinct().ToArray() ?? EmptyGuidSet; + + private static IReadOnlyCollection ReadGuidArray(BsonDocument document, string field) + { + if (!document.TryGetValue(field, out var value) || value is not BsonArray array) + { + return EmptyGuidSet; + } + + var items = new List(array.Count); + foreach (var element in array) + { + if (Guid.TryParse(element?.ToString(), out var id)) + { + items.Add(id); + } + } + + return items; + } + + private static DateTimeOffset? ParseDate(BsonValue value) + => value.BsonType switch + { + BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc), + BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => null, + }; +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailDto.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailDto.cs new file mode 100644 index 00000000..7526e232 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailDto.cs @@ -0,0 +1,113 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed record MsrcVulnerabilityDetailDto +{ + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + [JsonPropertyName("vulnerabilityId")] + public string VulnerabilityId { get; init; } = string.Empty; + + [JsonPropertyName("cveNumber")] + public string? 
CveNumber { get; init; } + + [JsonPropertyName("cveNumbers")] + public IReadOnlyList CveNumbers { get; init; } = Array.Empty(); + + [JsonPropertyName("title")] + public string Title { get; init; } = string.Empty; + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("releaseDate")] + public DateTimeOffset? ReleaseDate { get; init; } + + [JsonPropertyName("lastModifiedDate")] + public DateTimeOffset? LastModifiedDate { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("threats")] + public IReadOnlyList Threats { get; init; } = Array.Empty(); + + [JsonPropertyName("remediations")] + public IReadOnlyList Remediations { get; init; } = Array.Empty(); + + [JsonPropertyName("affectedProducts")] + public IReadOnlyList AffectedProducts { get; init; } = Array.Empty(); + + [JsonPropertyName("cvssV3")] + public MsrcCvssDto? Cvss { get; init; } + + [JsonPropertyName("releaseNoteUrl")] + public string? ReleaseNoteUrl { get; init; } + + [JsonPropertyName("cvrfUrl")] + public string? CvrfUrl { get; init; } +} + +public sealed record MsrcThreatDto +{ + [JsonPropertyName("type")] + public string? Type { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } +} + +public sealed record MsrcRemediationDto +{ + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("type")] + public string? Type { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("url")] + public string? Url { get; init; } + + [JsonPropertyName("kbNumber")] + public string? KbNumber { get; init; } +} + +public sealed record MsrcAffectedProductDto +{ + [JsonPropertyName("productId")] + public string? ProductId { get; init; } + + [JsonPropertyName("productName")] + public string? ProductName { get; init; } + + [JsonPropertyName("cpe")] + public string? Cpe { get; init; } + + [JsonPropertyName("platform")] + public string? Platform { get; init; } + + [JsonPropertyName("architecture")] + public string? Architecture { get; init; } + + [JsonPropertyName("buildNumber")] + public string? BuildNumber { get; init; } +} + +public sealed record MsrcCvssDto +{ + [JsonPropertyName("baseScore")] + public double? BaseScore { get; init; } + + [JsonPropertyName("vectorString")] + public string? VectorString { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailParser.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailParser.cs new file mode 100644 index 00000000..a684599d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDetailParser.cs @@ -0,0 +1,71 @@ +using System; +using System.Linq; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed class MsrcDetailParser +{ + public MsrcAdvisoryDto Parse(MsrcVulnerabilityDetailDto detail) + { + ArgumentNullException.ThrowIfNull(detail); + + var advisoryId = string.IsNullOrWhiteSpace(detail.VulnerabilityId) ? detail.Id : detail.VulnerabilityId; + var cveIds = detail.CveNumbers?.Where(static c => !string.IsNullOrWhiteSpace(c)).Select(static c => c.Trim()).ToArray() + ?? (string.IsNullOrWhiteSpace(detail.CveNumber) ? Array.Empty() : new[] { detail.CveNumber! }); + + var kbIds = detail.Remediations? 
+ .Where(static remediation => !string.IsNullOrWhiteSpace(remediation.KbNumber)) + .Select(static remediation => remediation.KbNumber!.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray() ?? Array.Empty(); + + return new MsrcAdvisoryDto + { + AdvisoryId = advisoryId, + Title = string.IsNullOrWhiteSpace(detail.Title) ? advisoryId : detail.Title.Trim(), + Description = detail.Description, + Severity = detail.Severity, + ReleaseDate = detail.ReleaseDate, + LastModifiedDate = detail.LastModifiedDate, + CveIds = cveIds, + KbIds = kbIds, + Threats = detail.Threats?.Select(static threat => new MsrcAdvisoryThreat( + threat.Type ?? "unspecified", + threat.Description, + threat.Severity)).ToArray() ?? Array.Empty(), + Remediations = detail.Remediations?.Select(static remediation => new MsrcAdvisoryRemediation( + remediation.Type ?? "unspecified", + remediation.Description, + remediation.Url, + remediation.KbNumber)).ToArray() ?? Array.Empty(), + Products = detail.AffectedProducts?.Select(product => + new MsrcAdvisoryProduct( + BuildProductIdentifier(product), + product.ProductName, + product.Platform, + product.Architecture, + product.BuildNumber, + product.Cpe)).ToArray() ?? Array.Empty(), + CvssBaseScore = detail.Cvss?.BaseScore, + CvssVector = detail.Cvss?.VectorString, + ReleaseNoteUrl = detail.ReleaseNoteUrl, + CvrfUrl = detail.CvrfUrl, + }; + } + + private static string BuildProductIdentifier(MsrcAffectedProductDto product) + { + var name = string.IsNullOrWhiteSpace(product.ProductName) ? product.ProductId : product.ProductName; + if (string.IsNullOrWhiteSpace(name)) + { + name = "Unknown Product"; + } + + if (!string.IsNullOrWhiteSpace(product.BuildNumber)) + { + return $"{name} build {product.BuildNumber}"; + } + + return name; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDiagnostics.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDiagnostics.cs new file mode 100644 index 00000000..00ada34d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDiagnostics.cs @@ -0,0 +1,129 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed class MsrcDiagnostics : IDisposable +{ + private const string MeterName = "StellaOps.Feedser.Source.Vndr.Msrc"; + private const string MeterVersion = "1.0.0"; + + private readonly Meter _meter; + private readonly Counter _summaryFetchAttempts; + private readonly Counter _summaryFetchSuccess; + private readonly Counter _summaryFetchFailures; + private readonly Histogram _summaryItemCount; + private readonly Histogram _summaryWindowHours; + private readonly Counter _detailFetchAttempts; + private readonly Counter _detailFetchSuccess; + private readonly Counter _detailFetchNotModified; + private readonly Counter _detailFetchFailures; + private readonly Histogram _detailEnqueued; + private readonly Counter _parseSuccess; + private readonly Counter _parseFailures; + private readonly Histogram _parseProductCount; + private readonly Histogram _parseKbCount; + private readonly Counter _mapSuccess; + private readonly Counter _mapFailures; + private readonly Histogram _mapAliasCount; + private readonly Histogram _mapAffectedCount; + + public MsrcDiagnostics() + { + _meter = new Meter(MeterName, MeterVersion); + _summaryFetchAttempts = _meter.CreateCounter("msrc.summary.fetch.attempts", "operations"); + _summaryFetchSuccess = _meter.CreateCounter("msrc.summary.fetch.success", "operations"); + 
_summaryFetchFailures = _meter.CreateCounter("msrc.summary.fetch.failures", "operations"); + _summaryItemCount = _meter.CreateHistogram("msrc.summary.items.count", "items"); + _summaryWindowHours = _meter.CreateHistogram("msrc.summary.window.hours", "hours"); + _detailFetchAttempts = _meter.CreateCounter("msrc.detail.fetch.attempts", "operations"); + _detailFetchSuccess = _meter.CreateCounter("msrc.detail.fetch.success", "operations"); + _detailFetchNotModified = _meter.CreateCounter("msrc.detail.fetch.not_modified", "operations"); + _detailFetchFailures = _meter.CreateCounter("msrc.detail.fetch.failures", "operations"); + _detailEnqueued = _meter.CreateHistogram("msrc.detail.enqueued.count", "documents"); + _parseSuccess = _meter.CreateCounter("msrc.parse.success", "documents"); + _parseFailures = _meter.CreateCounter("msrc.parse.failures", "documents"); + _parseProductCount = _meter.CreateHistogram("msrc.parse.products.count", "products"); + _parseKbCount = _meter.CreateHistogram("msrc.parse.kb.count", "kb"); + _mapSuccess = _meter.CreateCounter("msrc.map.success", "advisories"); + _mapFailures = _meter.CreateCounter("msrc.map.failures", "advisories"); + _mapAliasCount = _meter.CreateHistogram("msrc.map.aliases.count", "aliases"); + _mapAffectedCount = _meter.CreateHistogram("msrc.map.affected.count", "packages"); + } + + public void SummaryFetchAttempt() => _summaryFetchAttempts.Add(1); + + public void SummaryFetchSuccess(int count, double? windowHours) + { + _summaryFetchSuccess.Add(1); + if (count >= 0) + { + _summaryItemCount.Record(count); + } + + if (windowHours is { } value && value >= 0) + { + _summaryWindowHours.Record(value); + } + } + + public void SummaryFetchFailure(string reason) + => _summaryFetchFailures.Add(1, ReasonTag(reason)); + + public void DetailFetchAttempt() => _detailFetchAttempts.Add(1); + + public void DetailFetchSuccess() => _detailFetchSuccess.Add(1); + + public void DetailFetchNotModified() => _detailFetchNotModified.Add(1); + + public void DetailFetchFailure(string reason) + => _detailFetchFailures.Add(1, ReasonTag(reason)); + + public void DetailEnqueued(int count) + { + if (count >= 0) + { + _detailEnqueued.Record(count); + } + } + + public void ParseSuccess(int productCount, int kbCount) + { + _parseSuccess.Add(1); + if (productCount >= 0) + { + _parseProductCount.Record(productCount); + } + + if (kbCount >= 0) + { + _parseKbCount.Record(kbCount); + } + } + + public void ParseFailure(string reason) + => _parseFailures.Add(1, ReasonTag(reason)); + + public void MapSuccess(int aliasCount, int packageCount) + { + _mapSuccess.Add(1); + if (aliasCount >= 0) + { + _mapAliasCount.Record(aliasCount); + } + + if (packageCount >= 0) + { + _mapAffectedCount.Record(packageCount); + } + } + + public void MapFailure(string reason) + => _mapFailures.Add(1, ReasonTag(reason)); + + private static KeyValuePair ReasonTag(string reason) + => new("reason", string.IsNullOrWhiteSpace(reason) ? 
"unknown" : reason.ToLowerInvariant()); + + public void Dispose() => _meter.Dispose(); +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs new file mode 100644 index 00000000..26d0528d --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +internal static class MsrcDocumentMetadata +{ + public static Dictionary CreateMetadata(MsrcVulnerabilitySummary summary) + { + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["msrc.vulnerabilityId"] = summary.VulnerabilityId ?? summary.Id, + ["msrc.id"] = summary.Id, + }; + + if (summary.LastModifiedDate.HasValue) + { + metadata["msrc.lastModified"] = summary.LastModifiedDate.Value.ToString("O"); + } + + if (summary.ReleaseDate.HasValue) + { + metadata["msrc.releaseDate"] = summary.ReleaseDate.Value.ToString("O"); + } + + if (!string.IsNullOrWhiteSpace(summary.CvrfUrl)) + { + metadata["msrc.cvrfUrl"] = summary.CvrfUrl!; + } + + if (summary.CveNumbers.Count > 0) + { + metadata["msrc.cves"] = string.Join(",", summary.CveNumbers); + } + + return metadata; + } + + public static Dictionary CreateCvrfMetadata(MsrcVulnerabilitySummary summary) + { + var metadata = CreateMetadata(summary); + metadata["msrc.cvrf"] = "true"; + return metadata; + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcMapper.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcMapper.cs new file mode 100644 index 00000000..ff507360 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcMapper.cs @@ -0,0 +1,239 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Storage.Mongo.Documents; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +internal static class MsrcMapper +{ + public static Advisory Map(MsrcAdvisoryDto dto, DocumentRecord document, DateTimeOffset recordedAt) + { + ArgumentNullException.ThrowIfNull(dto); + ArgumentNullException.ThrowIfNull(document); + + var advisoryKey = dto.AdvisoryId; + var aliases = BuildAliases(dto); + var references = BuildReferences(dto, recordedAt); + var affectedPackages = BuildPackages(dto, recordedAt); + var cvssMetrics = BuildCvss(dto, recordedAt); + + var provenance = new AdvisoryProvenance( + source: MsrcConnectorPlugin.SourceName, + kind: "advisory", + value: advisoryKey, + recordedAt, + new[] { ProvenanceFieldMasks.Advisory }); + + return new Advisory( + advisoryKey: advisoryKey, + title: dto.Title, + summary: dto.Description, + language: "en", + published: dto.ReleaseDate, + modified: dto.LastModifiedDate, + severity: NormalizeSeverity(dto.Severity), + exploitKnown: false, + aliases: aliases, + references: references, + affectedPackages: affectedPackages, + cvssMetrics: cvssMetrics, + provenance: new[] { provenance }); + } + + private static IReadOnlyList BuildAliases(MsrcAdvisoryDto dto) + { + var aliases = new List { dto.AdvisoryId }; + foreach (var cve in dto.CveIds) + { + if (!string.IsNullOrWhiteSpace(cve)) + { + aliases.Add(cve); + } + } + + foreach (var kb in dto.KbIds) + { + if (!string.IsNullOrWhiteSpace(kb)) + { + aliases.Add(kb.StartsWith("KB", StringComparison.OrdinalIgnoreCase) ? 
kb : $"KB{kb}"); + } + } + + return aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildReferences(MsrcAdvisoryDto dto, DateTimeOffset recordedAt) + { + var references = new List(); + + if (!string.IsNullOrWhiteSpace(dto.ReleaseNoteUrl)) + { + references.Add(CreateReference(dto.ReleaseNoteUrl!, "details", recordedAt)); + } + + if (!string.IsNullOrWhiteSpace(dto.CvrfUrl)) + { + references.Add(CreateReference(dto.CvrfUrl!, "cvrf", recordedAt)); + } + + foreach (var remediation in dto.Remediations) + { + if (!string.IsNullOrWhiteSpace(remediation.Url)) + { + references.Add(CreateReference( + remediation.Url!, + string.Equals(remediation.Type, "security update", StringComparison.OrdinalIgnoreCase) ? "remediation" : remediation.Type ?? "reference", + recordedAt, + remediation.Description)); + } + } + + return references + .DistinctBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase) + .OrderBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static AdvisoryReference CreateReference(string url, string kind, DateTimeOffset recordedAt, string? summary = null) + => new( + url, + kind: kind.ToLowerInvariant(), + sourceTag: "msrc", + summary: summary, + provenance: new AdvisoryProvenance( + MsrcConnectorPlugin.SourceName, + "reference", + url, + recordedAt, + new[] { ProvenanceFieldMasks.References })); + + private static IReadOnlyList BuildPackages(MsrcAdvisoryDto dto, DateTimeOffset recordedAt) + { + if (dto.Products.Count == 0) + { + return Array.Empty(); + } + + var packages = new List(dto.Products.Count); + foreach (var product in dto.Products) + { + var identifier = string.IsNullOrWhiteSpace(product.Identifier) ? "Unknown Product" : product.Identifier; + var provenance = new AdvisoryProvenance( + MsrcConnectorPlugin.SourceName, + "package", + identifier, + recordedAt, + new[] { ProvenanceFieldMasks.AffectedPackages }); + + var notes = new List(); + if (!string.IsNullOrWhiteSpace(product.Platform)) + { + notes.Add($"platform:{product.Platform}"); + } + + if (!string.IsNullOrWhiteSpace(product.Architecture)) + { + notes.Add($"arch:{product.Architecture}"); + } + + if (!string.IsNullOrWhiteSpace(product.Cpe)) + { + notes.Add($"cpe:{product.Cpe}"); + } + + var range = !string.IsNullOrWhiteSpace(product.BuildNumber) + ? new[] + { + new AffectedVersionRange( + rangeKind: "custom", + introducedVersion: null, + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: $"build:{product.BuildNumber}", + provenance: new AdvisoryProvenance( + MsrcConnectorPlugin.SourceName, + "package-range", + identifier, + recordedAt, + new[] { ProvenanceFieldMasks.VersionRanges })), + } + : Array.Empty(); + + var normalizedRules = !string.IsNullOrWhiteSpace(product.BuildNumber) + ? 
new[] + { + new NormalizedVersionRule( + scheme: "msrc.build", + type: NormalizedVersionRuleTypes.Exact, + value: product.BuildNumber, + notes: string.Join(";", notes.Where(static n => !string.IsNullOrWhiteSpace(n)))) + } + : Array.Empty(); + + packages.Add(new AffectedPackage( + type: AffectedPackageTypes.Vendor, + identifier: identifier, + platform: product.Platform, + versionRanges: range, + statuses: Array.Empty(), + provenance: new[] { provenance }, + normalizedVersions: normalizedRules)); + } + + return packages + .DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static IReadOnlyList BuildCvss(MsrcAdvisoryDto dto, DateTimeOffset recordedAt) + { + if (dto.CvssBaseScore is null || string.IsNullOrWhiteSpace(dto.CvssVector)) + { + return Array.Empty(); + } + + var severity = CvssSeverityFromScore(dto.CvssBaseScore.Value); + + return new[] + { + new CvssMetric( + version: "3.1", + vector: dto.CvssVector!, + baseScore: dto.CvssBaseScore.Value, + baseSeverity: severity, + provenance: new AdvisoryProvenance( + MsrcConnectorPlugin.SourceName, + "cvss", + dto.AdvisoryId, + recordedAt, + new[] { ProvenanceFieldMasks.CvssMetrics })), + }; + } + + private static string CvssSeverityFromScore(double score) + => score switch + { + < 0 => "none", + < 4 => "low", + < 7 => "medium", + < 9 => "high", + _ => "critical", + }; + + private static string? NormalizeSeverity(string? severity) + { + if (string.IsNullOrWhiteSpace(severity)) + { + return null; + } + + return severity.Trim().ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcSummaryResponse.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcSummaryResponse.cs new file mode 100644 index 00000000..a8fadee1 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcSummaryResponse.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public sealed record MsrcSummaryResponse +{ + [JsonPropertyName("value")] + public List Value { get; init; } = new(); + + [JsonPropertyName("@odata.nextLink")] + public string? NextLink { get; init; } +} + +public sealed record MsrcVulnerabilitySummary +{ + [JsonPropertyName("id")] + public string Id { get; init; } = string.Empty; + + [JsonPropertyName("vulnerabilityId")] + public string? VulnerabilityId { get; init; } + + [JsonPropertyName("cveNumber")] + public string? CveNumber { get; init; } + + [JsonPropertyName("cveNumbers")] + public IReadOnlyList CveNumbers { get; init; } = Array.Empty(); + + [JsonPropertyName("title")] + public string? Title { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("releaseDate")] + public DateTimeOffset? ReleaseDate { get; init; } + + [JsonPropertyName("lastModifiedDate")] + public DateTimeOffset? LastModifiedDate { get; init; } + + [JsonPropertyName("severity")] + public string? Severity { get; init; } + + [JsonPropertyName("cvrfUrl")] + public string? 
CvrfUrl { get; init; } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcTokenProvider.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcTokenProvider.cs new file mode 100644 index 00000000..8511a3c8 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Internal/MsrcTokenProvider.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +public interface IMsrcTokenProvider +{ + Task GetAccessTokenAsync(CancellationToken cancellationToken); +} + +public sealed class MsrcTokenProvider : IMsrcTokenProvider, IDisposable +{ + private readonly IHttpClientFactory _httpClientFactory; + private readonly MsrcOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly SemaphoreSlim _refreshLock = new(1, 1); + + private AccessToken? _currentToken; + + public MsrcTokenProvider( + IHttpClientFactory httpClientFactory, + IOptions options, + TimeProvider? timeProvider, + ILogger logger) + { + _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task GetAccessTokenAsync(CancellationToken cancellationToken) + { + var token = _currentToken; + if (token is not null && !token.IsExpired(_timeProvider.GetUtcNow())) + { + return token.Token; + } + + await _refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + token = _currentToken; + if (token is not null && !token.IsExpired(_timeProvider.GetUtcNow())) + { + return token.Token; + } + + _logger.LogInformation("Requesting new MSRC access token"); + var client = _httpClientFactory.CreateClient(MsrcOptions.TokenClientName); + var request = new HttpRequestMessage(HttpMethod.Post, BuildTokenUri()) + { + Content = new FormUrlEncodedContent(new Dictionary + { + ["client_id"] = _options.ClientId, + ["client_secret"] = _options.ClientSecret, + ["grant_type"] = "client_credentials", + ["scope"] = _options.Scope, + }), + }; + + using var response = await client.SendAsync(request, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var payload = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken).ConfigureAwait(false) + ?? 
throw new InvalidOperationException("AAD token response was null."); + + var expiresAt = _timeProvider.GetUtcNow().AddSeconds(payload.ExpiresIn - 60); + _currentToken = new AccessToken(payload.AccessToken, expiresAt); + return payload.AccessToken; + } + finally + { + _refreshLock.Release(); + } + } + + private Uri BuildTokenUri() + => new($"https://login.microsoftonline.com/{_options.TenantId}/oauth2/v2.0/token"); + + public void Dispose() => _refreshLock.Dispose(); + + private sealed record AccessToken(string Token, DateTimeOffset ExpiresAt) + { + public bool IsExpired(DateTimeOffset now) => now >= ExpiresAt; + } + + private sealed record TokenResponse + { + [JsonPropertyName("access_token")] + public string AccessToken { get; init; } = string.Empty; + + [JsonPropertyName("expires_in")] + public int ExpiresIn { get; init; } + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/Jobs.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/Jobs.cs new file mode 100644 index 00000000..9e618b6c --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/Jobs.cs @@ -0,0 +1,22 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Feedser.Core.Jobs; + +namespace StellaOps.Feedser.Source.Vndr.Msrc; + +internal static class MsrcJobKinds +{ + public const string Fetch = "source:vndr.msrc:fetch"; +} + +internal sealed class MsrcFetchJob : IJob +{ + private readonly MsrcConnector _connector; + + public MsrcFetchJob(MsrcConnector connector) + => _connector = connector ?? throw new ArgumentNullException(nameof(connector)); + + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) + => _connector.FetchAsync(context.Services, cancellationToken); +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnector.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnector.cs new file mode 100644 index 00000000..33114ff5 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnector.cs @@ -0,0 +1,447 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Cryptography; +using System.Net.Http; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Feedser.Models; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; +using StellaOps.Feedser.Source.Vndr.Msrc.Internal; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Advisories; +using StellaOps.Feedser.Storage.Mongo.Documents; +using StellaOps.Feedser.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Vndr.Msrc; + +public sealed class MsrcConnector : IFeedConnector +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + WriteIndented = false, + }; + + private readonly MsrcApiClient _apiClient; + private readonly MsrcDetailParser _detailParser; + private readonly SourceFetchService _fetchService; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly MsrcOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + 
private readonly MsrcDiagnostics _diagnostics; + + public MsrcConnector( + MsrcApiClient apiClient, + MsrcDetailParser detailParser, + SourceFetchService fetchService, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + IOptions options, + TimeProvider? timeProvider, + MsrcDiagnostics diagnostics, + ILogger logger) + { + _apiClient = apiClient ?? throw new ArgumentNullException(nameof(apiClient)); + _detailParser = detailParser ?? throw new ArgumentNullException(nameof(detailParser)); + _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + _options.Validate(); + _timeProvider = timeProvider ?? TimeProvider.System; + _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public string SourceName => MsrcConnectorPlugin.SourceName; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var now = _timeProvider.GetUtcNow(); + var from = cursor.LastModifiedCursor ?? _options.InitialLastModified ?? now.AddDays(-30); + from = from.Add(-_options.CursorOverlap); + var to = now; + + _diagnostics.SummaryFetchAttempt(); + IReadOnlyList summaries; + + try + { + summaries = await _apiClient.FetchSummariesAsync(from, to, cancellationToken).ConfigureAwait(false); + var windowHours = (to - from).TotalHours; + _diagnostics.SummaryFetchSuccess(summaries.Count, windowHours); + } + catch (Exception ex) + { + _diagnostics.SummaryFetchFailure("exception"); + _logger.LogError(ex, "MSRC summary fetch failed"); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + if (summaries.Count == 0) + { + await UpdateCursorAsync(cursor.WithLastModifiedCursor(to), cancellationToken).ConfigureAwait(false); + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var processed = 0; + var failures = 0; + + foreach (var summary in summaries.OrderBy(static s => s.LastModifiedDate ?? DateTimeOffset.MinValue)) + { + cancellationToken.ThrowIfCancellationRequested(); + if (processed >= _options.MaxAdvisoriesPerFetch) + { + break; + } + + var vulnerabilityId = string.IsNullOrWhiteSpace(summary.VulnerabilityId) ? 
summary.Id : summary.VulnerabilityId!; + var detailUri = _apiClient.BuildDetailUri(vulnerabilityId).ToString(); + + try + { + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, detailUri, cancellationToken).ConfigureAwait(false); + if (existing is not null && !ShouldRefresh(summary, existing)) + { + _diagnostics.DetailFetchNotModified(); + continue; + } + + _diagnostics.DetailFetchAttempt(); + if (existing?.GridFsId is { } oldGridId) + { + await _rawDocumentStorage.DeleteAsync(oldGridId, cancellationToken).ConfigureAwait(false); + } + + var bytes = await _apiClient.FetchDetailAsync(vulnerabilityId, cancellationToken).ConfigureAwait(false); + var sha = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant(); + + var gridId = await _rawDocumentStorage.UploadAsync(SourceName, detailUri, bytes, "application/json", cancellationToken).ConfigureAwait(false); + + var metadata = MsrcDocumentMetadata.CreateMetadata(summary); + var headers = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["content-type"] = "application/json", + }; + + var documentId = existing?.Id ?? Guid.NewGuid(); + var record = new DocumentRecord( + documentId, + SourceName, + detailUri, + now, + sha, + DocumentStatuses.PendingParse, + ContentType: "application/json", + Headers: headers, + metadata, + existing?.Etag, + summary.LastModifiedDate, + gridId); + + var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + + pendingDocuments.Add(upserted.Id); + pendingMappings.Remove(upserted.Id); + _diagnostics.DetailFetchSuccess(); + processed++; + + if (_options.DownloadCvrf) + { + await FetchCvrfAsync(summary, now, cancellationToken).ConfigureAwait(false); + } + + if (_options.RequestDelay > TimeSpan.Zero) + { + await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _diagnostics.DetailFetchFailure("exception"); + failures++; + _logger.LogError(ex, "MSRC detail fetch failed for {VulnerabilityId}", vulnerabilityId); + await _stateRepository.MarkFailureAsync(SourceName, now, _options.FailureBackoff, ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + } + + _diagnostics.DetailEnqueued(processed); + + if (processed > 0 || failures > 0) + { + _logger.LogInformation("MSRC fetch cycle enqueued {Processed} advisories (failures={Failures}, pendingDocuments={PendingDocuments}, pendingMappings={PendingMappings})", processed, failures, pendingDocuments.Count, pendingMappings.Count); + } + + var latestCursor = summaries + .Where(static s => s.LastModifiedDate.HasValue) + .Select(static s => s.LastModifiedDate!.Value) + .DefaultIfEmpty(to) + .Max(); + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithLastModifiedCursor(latestCursor); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var remainingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var now = _timeProvider.GetUtcNow(); + + foreach (var documentId in cursor.PendingDocuments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await 
_documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + if (!document.GridFsId.HasValue) + { + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + _diagnostics.ParseFailure("missing_payload"); + continue; + } + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _diagnostics.ParseFailure("download_failed"); + _logger.LogError(ex, "MSRC unable to download document {DocumentId}", document.Id); + throw; + } + + MsrcVulnerabilityDetailDto? detail; + try + { + detail = JsonSerializer.Deserialize(payload, SerializerOptions); + } + catch (Exception ex) + { + _diagnostics.ParseFailure("deserialize_failed"); + _logger.LogError(ex, "MSRC failed to deserialize detail payload for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + if (detail is null) + { + _diagnostics.ParseFailure("empty_payload"); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + continue; + } + + var dto = _detailParser.Parse(detail); + var bson = BsonDocument.Parse(JsonSerializer.Serialize(dto, SerializerOptions)); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "msrc.detail.v1", bson, now); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + remainingDocuments.Remove(documentId); + pendingMappings.Add(document.Id); + _diagnostics.ParseSuccess(dto.Products.Count, dto.KbIds.Count); + } + + var updatedCursor = cursor + .WithPendingDocuments(remainingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToHashSet(); + + foreach (var documentId in cursor.PendingMappings) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingMappings.Remove(documentId); + continue; + } + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + _diagnostics.MapFailure("missing_dto"); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + MsrcAdvisoryDto? 
dto; + try + { + dto = JsonSerializer.Deserialize(dtoRecord.Payload.ToJson(), SerializerOptions); + } + catch (Exception ex) + { + _diagnostics.MapFailure("deserialize_dto_failed"); + _logger.LogError(ex, "MSRC failed to deserialize DTO for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + if (dto is null) + { + _diagnostics.MapFailure("null_dto"); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + continue; + } + + try + { + var advisory = MsrcMapper.Map(dto, document, dtoRecord.ValidatedAt); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + _diagnostics.MapSuccess(advisory.Aliases.Length, advisory.AffectedPackages.Length); + } + catch (Exception ex) + { + _diagnostics.MapFailure("exception"); + _logger.LogError(ex, "MSRC mapping failed for document {DocumentId}", document.Id); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + } + } + + var updatedCursor = cursor.WithPendingMappings(pendingMappings); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + private bool ShouldRefresh(MsrcVulnerabilitySummary summary, DocumentRecord existing) + { + if (existing.Status == DocumentStatuses.Failed) + { + return true; + } + + if (summary.LastModifiedDate is null) + { + return true; + } + + if (existing.Metadata is null || !existing.Metadata.TryGetValue("msrc.lastModified", out var stored)) + { + return true; + } + + return !string.Equals(stored, summary.LastModifiedDate.Value.ToString("O"), StringComparison.OrdinalIgnoreCase); + } + + private async Task GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false); + return state is null ? MsrcCursor.Empty : MsrcCursor.FromBson(state.Cursor); + } + + private async Task FetchCvrfAsync(MsrcVulnerabilitySummary summary, DateTimeOffset fetchedAt, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(summary.CvrfUrl)) + { + return; + } + + try + { + var uri = new Uri(summary.CvrfUrl); + var metadata = MsrcDocumentMetadata.CreateCvrfMetadata(summary); + var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, uri.ToString(), cancellationToken).ConfigureAwait(false); + var request = new SourceFetchRequest( + MsrcOptions.HttpClientName, + SourceName, + HttpMethod.Get, + uri, + metadata, + existing?.Etag, + existing?.LastModified, + AcceptHeaders: new[] { "application/zip", "application/xml", "application/json" }); + + var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false); + if (result.IsNotModified || !result.IsSuccess || result.Document is null) + { + return; + } + + await _documentStore.UpdateStatusAsync(result.Document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("MSRC CVRF artefact captured for {AdvisoryId} ({Uri})", summary.VulnerabilityId ?? 
summary.Id, uri); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "MSRC CVRF download failed for {CvrfUrl}", summary.CvrfUrl); + } + } + + private Task UpdateCursorAsync(MsrcCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBsonDocument(); + var completedAt = _timeProvider.GetUtcNow(); + return _stateRepository.UpdateCursorAsync(SourceName, document, completedAt, cancellationToken); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnectorPlugin.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnectorPlugin.cs new file mode 100644 index 00000000..61176fd9 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcConnectorPlugin.cs @@ -0,0 +1,21 @@ +using System; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Plugin; + +namespace StellaOps.Feedser.Source.Vndr.Msrc; + +public sealed class MsrcConnectorPlugin : IConnectorPlugin +{ + public const string SourceName = "vndr.msrc"; + + public string Name => SourceName; + + public bool IsAvailable(IServiceProvider services) + => services.GetService() is not null; + + public IFeedConnector Create(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return services.GetRequiredService(); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs new file mode 100644 index 00000000..685941e8 --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs @@ -0,0 +1,50 @@ +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Feedser.Core.Jobs; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; + +namespace StellaOps.Feedser.Source.Vndr.Msrc; + +public sealed class MsrcDependencyInjectionRoutine : IDependencyInjectionRoutine +{ + private const string ConfigurationSection = "feedser:sources:vndr:msrc"; + + public IServiceCollection Register(IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddMsrcConnector(options => + { + configuration.GetSection(ConfigurationSection).Bind(options); + options.Validate(); + }); + + services.AddTransient(); + + services.PostConfigure(options => + { + EnsureJob(options, MsrcJobKinds.Fetch, typeof(MsrcFetchJob)); + }); + + return services; + } + + private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType) + { + if (options.Definitions.ContainsKey(kind)) + { + return; + } + + options.Definitions[kind] = new JobDefinition( + kind, + jobType, + options.DefaultTimeout, + options.DefaultLeaseDuration, + CronExpression: null, + Enabled: true); + } +} diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcServiceCollectionExtensions.cs b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcServiceCollectionExtensions.cs new file mode 100644 index 00000000..d02b321b --- /dev/null +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/MsrcServiceCollectionExtensions.cs @@ -0,0 +1,55 @@ +using System; +using System.Net; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Feedser.Source.Common.Http; +using StellaOps.Feedser.Source.Vndr.Msrc.Configuration; +using StellaOps.Feedser.Source.Vndr.Msrc.Internal; + +namespace 
StellaOps.Feedser.Source.Vndr.Msrc;
+
+public static class MsrcServiceCollectionExtensions
+{
+    public static IServiceCollection AddMsrcConnector(this IServiceCollection services, Action<MsrcOptions> configure)
+    {
+        ArgumentNullException.ThrowIfNull(services);
+        ArgumentNullException.ThrowIfNull(configure);
+
+        services.AddOptions<MsrcOptions>()
+            .Configure(configure)
+            .PostConfigure(static options => options.Validate());
+
+        services.AddSourceHttpClient(MsrcOptions.HttpClientName, static (provider, clientOptions) =>
+        {
+            var options = provider.GetRequiredService<IOptions<MsrcOptions>>().Value;
+            clientOptions.Timeout = TimeSpan.FromSeconds(30);
+            clientOptions.AllowedHosts.Clear();
+            clientOptions.AllowedHosts.Add(options.BaseUri.Host);
+            clientOptions.AllowedHosts.Add("download.microsoft.com");
+            clientOptions.ConfigureHandler = handler =>
+            {
+                handler.AutomaticDecompression = DecompressionMethods.All;
+            };
+        });
+
+        services.AddSourceHttpClient(MsrcOptions.TokenClientName, static (_, clientOptions) =>
+        {
+            clientOptions.Timeout = TimeSpan.FromSeconds(30);
+            clientOptions.AllowedHosts.Clear();
+            clientOptions.AllowedHosts.Add("login.microsoftonline.com");
+            clientOptions.ConfigureHandler = handler =>
+            {
+                handler.AutomaticDecompression = DecompressionMethods.All;
+            };
+        });
+
+        services.TryAddSingleton();
+        services.TryAddSingleton();
+        services.TryAddSingleton();
+        services.TryAddSingleton();
+        services.AddTransient();
+
+        return services;
+    }
+}
diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/README.md b/src/StellaOps.Feedser.Source.Vndr.Msrc/README.md
new file mode 100644
index 00000000..6b45656e
--- /dev/null
+++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/README.md
@@ -0,0 +1,19 @@
+# MSRC Security Updates – Connector Notes
+
+## API endpoints
+- **Vulnerability summaries** – `GET https://api.msrc.microsoft.com/sug/v2.0/en-US/vulnerabilities` (requires `api-version=2024-08-01` and a client-credential bearer token).
+- **Vulnerability detail** – `GET https://api.msrc.microsoft.com/sug/v2.0/en-US/vulnerability/{id}` (same headers/scopes).
+- **CVRF package** – the detail payload contains `cvrfUrl`, which points to a ZIP/JSON asset that is stable per revision. We surface the URL as a reference and capture it in metadata for future offline bundling.
+
+## Cursor behaviour
+- The connector keeps a `lastModifiedCursor` and replays the previous 10 minutes on every fetch to cover late revisions.
+- MSRC limits requests to ~60/minute; `requestDelay` defaults to 250 ms and is configurable.
+
+## Authentication
+- Uses the Azure AD client-credentials flow against `https://login.microsoftonline.com/{tenantId}/oauth2/v2.0/token` with scope `api://api.msrc.microsoft.com/.default`.
+- Tokens are acquired lazily and cached until 60 seconds before expiry.
+- Configuration values (`tenantId`, `clientId`, `clientSecret`) must be supplied via `feedser:sources:vndr:msrc`.
+
+## CVRF handling
+- The detail payload is persisted with the `cvrfUrl` in metadata (`msrc.cvrfUrl`).
+- The mapping stage emits the CVRF link as a reference so offline runs can fetch it later. When `DownloadCvrf` is enabled, the connector also saves the ZIP artefact to the document store (marked `msrc.cvrf=true`) for Offline Kit staging.
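The authentication notes above describe the token flow only at a high level. The sketch below illustrates the client-credentials exchange and the "refresh 60 seconds before expiry" caching policy under stated assumptions: `MsrcTokenSketch` is a hypothetical stand-in, not the connector's actual token provider, and it talks to the standard Azure AD v2.0 token endpoint directly rather than through the `Source.Common` HTTP client plumbing.

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;

// Minimal illustration of the AAD client-credentials flow described in the README.
// The class name and structure are assumptions; the real provider may differ.
public sealed class MsrcTokenSketch
{
    private readonly HttpClient _httpClient;
    private readonly string _tenantId;
    private readonly string _clientId;
    private readonly string _clientSecret;
    private string? _cachedToken;
    private DateTimeOffset _expiresAt;

    public MsrcTokenSketch(HttpClient httpClient, string tenantId, string clientId, string clientSecret)
    {
        _httpClient = httpClient;
        _tenantId = tenantId;
        _clientId = clientId;
        _clientSecret = clientSecret;
    }

    public async Task<string> GetTokenAsync(CancellationToken cancellationToken)
    {
        // Reuse the cached token until 60 seconds before it expires (lazy refresh).
        if (_cachedToken is not null && DateTimeOffset.UtcNow < _expiresAt - TimeSpan.FromSeconds(60))
        {
            return _cachedToken;
        }

        var tokenUri = $"https://login.microsoftonline.com/{_tenantId}/oauth2/v2.0/token";
        using var content = new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["grant_type"] = "client_credentials",
            ["client_id"] = _clientId,
            ["client_secret"] = _clientSecret,
            ["scope"] = "api://api.msrc.microsoft.com/.default",
        });

        using var response = await _httpClient.PostAsync(tokenUri, content, cancellationToken).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
        using var payload = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);

        _cachedToken = payload.RootElement.GetProperty("access_token").GetString()!;
        _expiresAt = DateTimeOffset.UtcNow.AddSeconds(payload.RootElement.GetProperty("expires_in").GetInt32());
        return _cachedToken;
    }
}
```

In the connector itself the equivalent logic sits behind the token HTTP client registered against `login.microsoftonline.com` in `MsrcServiceCollectionExtensions`, with the tenant/client values bound from `feedser:sources:vndr:msrc`.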
diff --git a/src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md b/src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md index 6bdb7fbc..8139644e 100644 --- a/src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md +++ b/src/StellaOps.Feedser.Source.Vndr.Msrc/TASKS.md @@ -2,10 +2,10 @@ | Task | Owner(s) | Depends on | Notes | |---|---|---|---| |FEEDCONN-MSRC-02-001 Document MSRC Security Update Guide API|BE-Conn-MSRC|Research|**DONE (2025-10-11)** – Confirmed REST endpoint (`https://api.msrc.microsoft.com/sug/v2.0/en-US/vulnerabilities`) + CVRF ZIP download flow, required Azure AD client-credentials scope (`api://api.msrc.microsoft.com/.default`), mandatory `api-version=2024-08-01` header, and delta params (`lastModifiedStartDateTime`, `lastModifiedEndDateTime`). Findings recorded in `docs/feedser-connector-research-20251011.md`.| -|FEEDCONN-MSRC-02-002 Fetch pipeline & source state|BE-Conn-MSRC|Source.Common, Storage.Mongo|**TODO** – Implement fetch job that loops over `lastModifiedStartDateTime` cursor, handles `Retry-After` on throttling (default quota 60 req/min), and persists both REST JSON + optional CVRF attachments. Maintain source_state cursor at minute precision with overlap to cover delayed revisions.| -|FEEDCONN-MSRC-02-003 Parser & DTO implementation|BE-Conn-MSRC|Source.Common|**TODO** – Extract `vulnerabilityId`, `cveNumber`, `title`, `description`, `threats[]`, `remediations[]`, KB list, CVSS data, and `affectedProducts`. Map products into package identifiers (Windows build numbers, Office version) and capture `releaseNotes` URLs as references.| -|FEEDCONN-MSRC-02-004 Canonical mapping & range primitives|BE-Conn-MSRC|Models|**TODO** – Map advisories to canonical records with aliases, references, range primitives for product/build coverage. Coordinate scheme naming and normalized outputs with `../StellaOps.Feedser.Merge/RANGE_PRIMITIVES_COORDINATION.md`.
    2025-10-11 research trail: normalized array exemplar `[{"scheme":"semver","type":"range","min":"","minInclusive":true,"max":"","maxInclusive":false,"notes":"msrc:KB"}]`; if monthly rollups require `msrc.patch` scheme, gather samples and align with Models before emitting.| -|FEEDCONN-MSRC-02-005 Deterministic fixtures/tests|QA|Testing|**TODO** – Add regression tests with fixtures; support `UPDATE_MSRC_FIXTURES=1`.| -|FEEDCONN-MSRC-02-006 Telemetry & documentation|DevEx|Docs|**TODO** – Add logging/metrics and documentation; update backlog once connector is production-ready.| +|FEEDCONN-MSRC-02-002 Fetch pipeline & source state|BE-Conn-MSRC|Source.Common, Storage.Mongo|**DONE (2025-10-15)** – Added `MsrcApiClient` + token provider, cursor overlap handling, and detail persistence via GridFS (metadata carries CVRF URL + timestamps). State tracks `lastModifiedCursor` with configurable overlap/backoff. **Next:** coordinate with Tools on shared state-seeding helper once CVRF download flag stabilises.| +|FEEDCONN-MSRC-02-003 Parser & DTO implementation|BE-Conn-MSRC|Source.Common|**DONE (2025-10-15)** – Implemented `MsrcDetailParser`/DTOs capturing threats, remediations, KB IDs, CVEs, CVSS, and affected products (build/platform metadata preserved).| +|FEEDCONN-MSRC-02-004 Canonical mapping & range primitives|BE-Conn-MSRC|Models|**DONE (2025-10-15)** – `MsrcMapper` emits aliases (MSRC ID/CVE/KB), references (release notes + CVRF), vendor packages with `msrc.build` normalized rules, and CVSS provenance.| +|FEEDCONN-MSRC-02-005 Deterministic fixtures/tests|QA|Testing|**DONE (2025-10-15)** – Added `StellaOps.Feedser.Source.Vndr.Msrc.Tests` with canned token/summary/detail responses and snapshot assertions via Mongo2Go. Fixtures regenerate via `UPDATE_MSRC_FIXTURES`.| +|FEEDCONN-MSRC-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-15)** – Introduced `MsrcDiagnostics` meter (summary/detail/parse/map metrics), structured fetch logs, README updates, and Ops brief `docs/ops/feedser-msrc-operations.md` covering AAD onboarding + CVRF handling.| |FEEDCONN-MSRC-02-007 API contract comparison memo|BE-Conn-MSRC|Research|**DONE (2025-10-11)** – Completed memo outline recommending dual-path (REST for incremental, CVRF for offline); implementation hinges on `FEEDCONN-MSRC-02-008` AAD onboarding for token acquisition.| -|FEEDCONN-MSRC-02-008 Azure AD application onboarding|Ops, BE-Conn-MSRC|Ops|**TODO** – Provision MSRC SUG app registration, document client credential flow, rotation cadence, and secure storage expectations for Offline Kit deployments.| +|FEEDCONN-MSRC-02-008 Azure AD application onboarding|Ops, BE-Conn-MSRC|Ops|**DONE (2025-10-15)** – Coordinated Ops handoff; drafted AAD onboarding brief (`docs/ops/feedser-msrc-operations.md`) with app registration requirements, secret rotation policy, sample configuration, and CVRF mirroring guidance for Offline Kit.| diff --git a/src/StellaOps.Feedser.Storage.Mongo/Advisories/AdvisoryStore.cs b/src/StellaOps.Feedser.Storage.Mongo/Advisories/AdvisoryStore.cs index 1275ab40..6862cc9b 100644 --- a/src/StellaOps.Feedser.Storage.Mongo/Advisories/AdvisoryStore.cs +++ b/src/StellaOps.Feedser.Storage.Mongo/Advisories/AdvisoryStore.cs @@ -355,18 +355,19 @@ public sealed class AdvisoryStore : IAdvisoryStore EvrPrimitive? evr = null; IReadOnlyDictionary? 
vendor = null; - if (document.TryGetValue("semVer", out var semverValue) && semverValue.IsBsonDocument) - { - var semverDoc = semverValue.AsBsonDocument; - semVer = new SemVerPrimitive( - semverDoc.TryGetValue("introduced", out var semIntroduced) && semIntroduced.IsString ? semIntroduced.AsString : null, - semverDoc.TryGetValue("introducedInclusive", out var semIntroducedInclusive) && semIntroducedInclusive.IsBoolean && semIntroducedInclusive.AsBoolean, - semverDoc.TryGetValue("fixed", out var semFixed) && semFixed.IsString ? semFixed.AsString : null, - semverDoc.TryGetValue("fixedInclusive", out var semFixedInclusive) && semFixedInclusive.IsBoolean && semFixedInclusive.AsBoolean, - semverDoc.TryGetValue("lastAffected", out var semLast) && semLast.IsString ? semLast.AsString : null, - semverDoc.TryGetValue("lastAffectedInclusive", out var semLastInclusive) && semLastInclusive.IsBoolean && semLastInclusive.AsBoolean, - semverDoc.TryGetValue("constraintExpression", out var constraint) && constraint.IsString ? constraint.AsString : null); - } + if (document.TryGetValue("semVer", out var semverValue) && semverValue.IsBsonDocument) + { + var semverDoc = semverValue.AsBsonDocument; + semVer = new SemVerPrimitive( + semverDoc.TryGetValue("introduced", out var semIntroduced) && semIntroduced.IsString ? semIntroduced.AsString : null, + semverDoc.TryGetValue("introducedInclusive", out var semIntroducedInclusive) && semIntroducedInclusive.IsBoolean && semIntroducedInclusive.AsBoolean, + semverDoc.TryGetValue("fixed", out var semFixed) && semFixed.IsString ? semFixed.AsString : null, + semverDoc.TryGetValue("fixedInclusive", out var semFixedInclusive) && semFixedInclusive.IsBoolean && semFixedInclusive.AsBoolean, + semverDoc.TryGetValue("lastAffected", out var semLast) && semLast.IsString ? semLast.AsString : null, + semverDoc.TryGetValue("lastAffectedInclusive", out var semLastInclusive) && semLastInclusive.IsBoolean && semLastInclusive.AsBoolean, + semverDoc.TryGetValue("constraintExpression", out var constraint) && constraint.IsString ? constraint.AsString : null, + semverDoc.TryGetValue("exactValue", out var exact) && exact.IsString ? 
exact.AsString : null); + } if (document.TryGetValue("nevra", out var nevraValue) && nevraValue.IsBsonDocument) { diff --git a/src/StellaOps.Feedser.Testing/MongoIntegrationFixture.cs b/src/StellaOps.Feedser.Testing/MongoIntegrationFixture.cs index c782edf8..c1843801 100644 --- a/src/StellaOps.Feedser.Testing/MongoIntegrationFixture.cs +++ b/src/StellaOps.Feedser.Testing/MongoIntegrationFixture.cs @@ -1,9 +1,12 @@ -using MongoDB.Bson; -using Mongo2Go; -using Xunit; -using MongoDB.Driver; - -namespace StellaOps.Feedser.Testing; +using System; +using System.IO; +using System.Linq; +using MongoDB.Bson; +using Mongo2Go; +using Xunit; +using MongoDB.Driver; + +namespace StellaOps.Feedser.Testing; public sealed class MongoIntegrationFixture : IAsyncLifetime { @@ -11,17 +14,68 @@ public sealed class MongoIntegrationFixture : IAsyncLifetime public IMongoDatabase Database { get; private set; } = null!; public IMongoClient Client { get; private set; } = null!; - public Task InitializeAsync() - { - Runner = MongoDbRunner.Start(singleNodeReplSet: true); - Client = new MongoClient(Runner.ConnectionString); - Database = Client.GetDatabase($"feedser-tests-{Guid.NewGuid():N}"); - return Task.CompletedTask; - } + public Task InitializeAsync() + { + EnsureMongo2GoEnvironment(); + Runner = MongoDbRunner.Start(singleNodeReplSet: true); + Client = new MongoClient(Runner.ConnectionString); + Database = Client.GetDatabase($"feedser-tests-{Guid.NewGuid():N}"); + return Task.CompletedTask; + } - public Task DisposeAsync() - { - Runner.Dispose(); - return Task.CompletedTask; - } -} + public Task DisposeAsync() + { + Runner.Dispose(); + return Task.CompletedTask; + } + + private static void EnsureMongo2GoEnvironment() + { + if (!OperatingSystem.IsLinux()) + { + return; + } + + var libraryPath = ResolveOpenSslLibraryPath(); + if (libraryPath is null) + { + return; + } + + var existing = Environment.GetEnvironmentVariable("LD_LIBRARY_PATH"); + if (string.IsNullOrEmpty(existing)) + { + Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", libraryPath); + return; + } + + var segments = existing.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (!segments.Contains(libraryPath, StringComparer.Ordinal)) + { + Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", string.Join(':', new[] { libraryPath }.Concat(segments))); + } + } + + private static string? 
ResolveOpenSslLibraryPath() + { + var current = AppContext.BaseDirectory; + while (!string.IsNullOrEmpty(current)) + { + var candidate = Path.Combine(current, "tools", "openssl", "linux-x64"); + if (Directory.Exists(candidate)) + { + return candidate; + } + + var parent = Directory.GetParent(current); + if (parent is null) + { + break; + } + + current = parent.FullName; + } + + return null; + } +} diff --git a/src/StellaOps.Feedser.WebService/Extensions/TelemetryExtensions.cs b/src/StellaOps.Feedser.WebService/Extensions/TelemetryExtensions.cs index 606b01d8..8d67a09b 100644 --- a/src/StellaOps.Feedser.WebService/Extensions/TelemetryExtensions.cs +++ b/src/StellaOps.Feedser.WebService/Extensions/TelemetryExtensions.cs @@ -84,6 +84,7 @@ public static class TelemetryExtensions metrics .AddMeter(JobDiagnostics.MeterName) .AddMeter(SourceDiagnostics.MeterName) + .AddMeter("StellaOps.Feedser.Source.CertBund") .AddMeter("StellaOps.Feedser.Source.Nvd") .AddMeter("StellaOps.Feedser.Source.Vndr.Chromium") .AddMeter("StellaOps.Feedser.Source.Vndr.Apple") diff --git a/src/StellaOps.sln b/src/StellaOps.sln index d0d7393a..7a21e35e 100644 --- a/src/StellaOps.sln +++ b/src/StellaOps.sln @@ -135,6 +135,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Source.Vn EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Source.Vndr.Cisco", "StellaOps.Feedser.Source.Vndr.Cisco\StellaOps.Feedser.Source.Vndr.Cisco.csproj", "{CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Source.Vndr.Cisco.Tests", "StellaOps.Feedser.Source.Vndr.Cisco.Tests\StellaOps.Feedser.Source.Vndr.Cisco.Tests.csproj", "{99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Source.Vndr.Msrc", "StellaOps.Feedser.Source.Vndr.Msrc\StellaOps.Feedser.Source.Vndr.Msrc.csproj", "{5CCE0DB7-C115-4B21-A7AE-C8488C22A853}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Feedser.Source.Vndr.Oracle", "StellaOps.Feedser.Source.Vndr.Oracle\StellaOps.Feedser.Source.Vndr.Oracle.csproj", "{A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}" @@ -957,6 +959,18 @@ Global {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x64.Build.0 = Release|Any CPU {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x86.ActiveCfg = Release|Any CPU {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x86.Build.0 = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x64.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x64.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x86.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x86.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|Any CPU.Build.0 = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x64.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x64.Build.0 = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x86.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x86.Build.0 = Release|Any CPU {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|Any 
CPU.Build.0 = Debug|Any CPU {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -1291,6 +1305,7 @@ Global {606C751B-7CF1-47CF-A25C-9248A55C814F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {5CCE0DB7-C115-4B21-A7AE-C8488C22A853} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} {06DC817F-A936-4F83-8929-E00622B32245} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} diff --git a/tools/SourceStateSeeder/Program.cs b/tools/SourceStateSeeder/Program.cs new file mode 100644 index 00000000..183c5166 --- /dev/null +++ b/tools/SourceStateSeeder/Program.cs @@ -0,0 +1,382 @@ +using System.Globalization; +using System.Security.Cryptography; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Feedser.Source.Common; +using StellaOps.Feedser.Source.Common.Fetch; +using StellaOps.Feedser.Storage.Mongo; +using StellaOps.Feedser.Storage.Mongo.Documents; + +namespace SourceStateSeeder; + +internal static class Program +{ + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }; + + public static async Task Main(string[] args) + { + try + { + var options = SeedOptions.Parse(args); + if (options is null) + { + SeedOptions.PrintUsage(); + return 1; + } + + var seed = await LoadSpecificationAsync(options.InputPath).ConfigureAwait(false); + var sourceName = seed.Source ?? options.SourceName; + if (string.IsNullOrWhiteSpace(sourceName)) + { + Console.Error.WriteLine("Source name must be supplied via --source or the seed file."); + return 1; + } + + var client = new MongoClient(options.ConnectionString); + var database = client.GetDatabase(options.DatabaseName); + + var loggerFactory = NullLoggerFactory.Instance; + var documentStore = new DocumentStore(database, loggerFactory.CreateLogger()); + var rawStorage = new RawDocumentStorage(database); + var stateRepository = new MongoSourceStateRepository(database, loggerFactory.CreateLogger()); + + var pendingDocumentIds = new List(); + var pendingMappingIds = new List(); + var knownAdvisories = new List(); + + var now = DateTimeOffset.UtcNow; + var baseDirectory = Path.GetDirectoryName(Path.GetFullPath(options.InputPath)) ?? 
Directory.GetCurrentDirectory(); + + foreach (var document in seed.Documents) + { + var (record, addedToPendingDocs, addedToPendingMaps, known) = await UpsertDocumentAsync( + documentStore, + rawStorage, + sourceName, + baseDirectory, + now, + document, + cancellationToken: default).ConfigureAwait(false); + + if (addedToPendingDocs) + { + pendingDocumentIds.Add(record.Id); + } + + if (addedToPendingMaps) + { + pendingMappingIds.Add(record.Id); + } + + if (known is not null) + { + knownAdvisories.AddRange(known); + } + } + + await UpdateCursorAsync( + stateRepository, + sourceName, + seed.Cursor, + pendingDocumentIds, + pendingMappingIds, + knownAdvisories, + now).ConfigureAwait(false); + + Console.WriteLine($"Seeded {pendingDocumentIds.Count + pendingMappingIds.Count} documents for {sourceName}."); + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"Error: {ex.Message}"); + return 1; + } + } + + private static async Task LoadSpecificationAsync(string inputPath) + { + await using var stream = File.OpenRead(inputPath); + var seed = await JsonSerializer.DeserializeAsync(stream, JsonOptions).ConfigureAwait(false) + ?? throw new InvalidOperationException("Input file deserialized to null."); + return seed; + } + + private static async Task<(DocumentRecord Record, bool PendingDoc, bool PendingMap, IReadOnlyCollection? Known)> UpsertDocumentAsync( + DocumentStore documentStore, + RawDocumentStorage rawStorage, + string sourceName, + string baseDirectory, + DateTimeOffset fetchedAt, + DocumentSeed seed, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(seed.Uri)) + { + throw new InvalidOperationException("Seed entry missing 'uri'."); + } + + if (string.IsNullOrWhiteSpace(seed.ContentFile)) + { + throw new InvalidOperationException($"Seed entry for '{seed.Uri}' missing 'contentFile'."); + } + + var contentPath = Path.IsPathRooted(seed.ContentFile) + ? seed.ContentFile + : Path.GetFullPath(Path.Combine(baseDirectory, seed.ContentFile)); + + if (!File.Exists(contentPath)) + { + throw new FileNotFoundException($"Content file not found for '{seed.Uri}'.", contentPath); + } + + var contentBytes = await File.ReadAllBytesAsync(contentPath, cancellationToken).ConfigureAwait(false); + var sha256 = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant(); + var gridId = await rawStorage.UploadAsync( + sourceName, + seed.Uri, + contentBytes, + seed.ContentType, + seed.ExpiresAt, + cancellationToken).ConfigureAwait(false); + + var metadata = seed.Metadata is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(seed.Metadata, StringComparer.OrdinalIgnoreCase); + + var headers = seed.Headers is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(seed.Headers, StringComparer.OrdinalIgnoreCase); + + if (!headers.ContainsKey("content-type") && !string.IsNullOrWhiteSpace(seed.ContentType)) + { + headers["content-type"] = seed.ContentType!; + } + + var lastModified = seed.LastModified is null + ? (DateTimeOffset?)null + : DateTimeOffset.Parse(seed.LastModified, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); + + var record = new DocumentRecord( + Guid.NewGuid(), + sourceName, + seed.Uri, + fetchedAt, + sha256, + string.IsNullOrWhiteSpace(seed.Status) ? 
DocumentStatuses.PendingParse : seed.Status, + seed.ContentType, + headers, + metadata, + seed.Etag, + lastModified, + gridId, + seed.ExpiresAt); + + var upserted = await documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + + return (upserted, seed.AddToPendingDocuments, seed.AddToPendingMappings, seed.KnownIdentifiers); + } + + private static async Task UpdateCursorAsync( + ISourceStateRepository repository, + string sourceName, + CursorSeed? cursorSeed, + IReadOnlyCollection pendingDocuments, + IReadOnlyCollection pendingMappings, + IReadOnlyCollection knownAdvisories, + DateTimeOffset completedAt) + { + var state = await repository.TryGetAsync(sourceName, CancellationToken.None).ConfigureAwait(false); + var cursor = state?.Cursor ?? new BsonDocument(); + + MergeGuidArray(cursor, "pendingDocuments", pendingDocuments); + MergeGuidArray(cursor, "pendingMappings", pendingMappings); + + if (knownAdvisories.Count > 0) + { + MergeStringArray(cursor, "knownAdvisories", knownAdvisories); + } + + if (cursorSeed is not null) + { + if (cursorSeed.LastModifiedCursor.HasValue) + { + cursor["lastModifiedCursor"] = cursorSeed.LastModifiedCursor.Value.UtcDateTime; + } + + if (cursorSeed.LastFetchAt.HasValue) + { + cursor["lastFetchAt"] = cursorSeed.LastFetchAt.Value.UtcDateTime; + } + + if (cursorSeed.Additional is not null) + { + foreach (var kvp in cursorSeed.Additional) + { + cursor[kvp.Key] = kvp.Value; + } + } + } + + cursor["lastSeededAt"] = completedAt.UtcDateTime; + + await repository.UpdateCursorAsync(sourceName, cursor, completedAt, CancellationToken.None).ConfigureAwait(false); + } + + private static void MergeGuidArray(BsonDocument cursor, string field, IReadOnlyCollection values) + { + if (values.Count == 0) + { + return; + } + + var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array + ? array.Select(v => Guid.TryParse(v?.AsString, out var parsed) ? parsed : Guid.Empty) + .Where(g => g != Guid.Empty) + .ToHashSet() + : new HashSet(); + + foreach (var guid in values) + { + existing.Add(guid); + } + + cursor[field] = new BsonArray(existing.Select(g => g.ToString())); + } + + private static void MergeStringArray(BsonDocument cursor, string field, IReadOnlyCollection values) + { + if (values.Count == 0) + { + return; + } + + var existing = cursor.TryGetValue(field, out var value) && value is BsonArray array + ? array.Select(v => v?.AsString ?? string.Empty) + .Where(s => !string.IsNullOrWhiteSpace(s)) + .ToHashSet(StringComparer.OrdinalIgnoreCase) + : new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var entry in values) + { + if (!string.IsNullOrWhiteSpace(entry)) + { + existing.Add(entry.Trim()); + } + } + + cursor[field] = new BsonArray(existing.OrderBy(s => s, StringComparer.OrdinalIgnoreCase)); + } +} + +internal sealed record SeedOptions +{ + public required string ConnectionString { get; init; } + public required string DatabaseName { get; init; } + public required string InputPath { get; init; } + public string? SourceName { get; init; } + + public static SeedOptions? Parse(string[] args) + { + string? connectionString = null; + string? database = null; + string? input = null; + string? 
source = null; + + for (var i = 0; i < args.Length; i++) + { + var arg = args[i]; + switch (arg) + { + case "--connection-string": + case "-c": + connectionString = TakeValue(args, ref i, arg); + break; + case "--database": + case "-d": + database = TakeValue(args, ref i, arg); + break; + case "--input": + case "-i": + input = TakeValue(args, ref i, arg); + break; + case "--source": + case "-s": + source = TakeValue(args, ref i, arg); + break; + case "--help": + case "-h": + return null; + default: + Console.Error.WriteLine($"Unrecognized argument '{arg}'."); + return null; + } + } + + if (string.IsNullOrWhiteSpace(connectionString) || string.IsNullOrWhiteSpace(database) || string.IsNullOrWhiteSpace(input)) + { + return null; + } + + return new SeedOptions + { + ConnectionString = connectionString, + DatabaseName = database, + InputPath = input, + SourceName = source, + }; + } + + public static void PrintUsage() + { + Console.WriteLine("Usage: dotnet run --project tools/SourceStateSeeder -- --connection-string --database --input [--source ]"); + } + + private static string TakeValue(string[] args, ref int index, string arg) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException($"Missing value for {arg}."); + } + + index++; + return args[index]; + } +} + +internal sealed record StateSeed +{ + public string? Source { get; init; } + public List Documents { get; init; } = new(); + public CursorSeed? Cursor { get; init; } +} + +internal sealed record DocumentSeed +{ + public string Uri { get; init; } = string.Empty; + public string ContentFile { get; init; } = string.Empty; + public string? ContentType { get; init; } + public Dictionary? Metadata { get; init; } + public Dictionary? Headers { get; init; } + public string Status { get; init; } = DocumentStatuses.PendingParse; + public bool AddToPendingDocuments { get; init; } = true; + public bool AddToPendingMappings { get; init; } + public string? LastModified { get; init; } + public string? Etag { get; init; } + public DateTimeOffset? ExpiresAt { get; init; } + public IReadOnlyCollection? KnownIdentifiers { get; init; } +} + +internal sealed record CursorSeed +{ + public DateTimeOffset? LastModifiedCursor { get; init; } + public DateTimeOffset? LastFetchAt { get; init; } + public Dictionary? Additional { get; init; } +} diff --git a/tools/SourceStateSeeder/SourceStateSeeder.csproj b/tools/SourceStateSeeder/SourceStateSeeder.csproj new file mode 100644 index 00000000..0a1cbb03 --- /dev/null +++ b/tools/SourceStateSeeder/SourceStateSeeder.csproj @@ -0,0 +1,12 @@ + + + Exe + net10.0 + enable + enable + + + + + + diff --git a/tools/openssl/linux-x64/libcrypto.so.1.1 b/tools/openssl/linux-x64/libcrypto.so.1.1 new file mode 100644 index 00000000..2caf7aab Binary files /dev/null and b/tools/openssl/linux-x64/libcrypto.so.1.1 differ diff --git a/tools/openssl/linux-x64/libssl.so.1.1 b/tools/openssl/linux-x64/libssl.so.1.1 new file mode 100644 index 00000000..0fb2394c Binary files /dev/null and b/tools/openssl/linux-x64/libssl.so.1.1 differ
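For operators using the new `SourceStateSeeder` tool, the seed specification is a JSON file matching the `StateSeed`/`DocumentSeed`/`CursorSeed` records above, serialized with web defaults (camelCase, case-insensitive). The sketch below shows one way to produce such a file; the source name, URI, fixture path, and metadata values are illustrative assumptions, not shipped defaults, and `status` is omitted so the seeder falls back to the pending-parse document status.

```csharp
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

// Build a seed specification shaped like StateSeed/DocumentSeed/CursorSeed (web/camelCase contract).
var seed = new
{
    source = "vndr.msrc",                                   // illustrative source name
    documents = new[]
    {
        new
        {
            uri = "https://api.msrc.microsoft.com/sug/v2.0/en-US/vulnerability/EXAMPLE-0001", // hypothetical
            contentFile = "fixtures/msrc-detail-example.json",   // resolved relative to the seed file
            contentType = "application/json",
            addToPendingDocuments = true,
            addToPendingMappings = true,
            lastModified = "2025-10-15T00:00:00Z",
            metadata = new Dictionary<string, string> { ["msrc.lastModified"] = "2025-10-15T00:00:00Z" },
            knownIdentifiers = new[] { "CVE-2025-0000" }         // placeholder identifier
        }
    },
    cursor = new
    {
        lastModifiedCursor = "2025-10-15T00:00:00Z"
    }
};

File.WriteAllText(
    "msrc-seed.json",
    JsonSerializer.Serialize(seed, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }));

// Then run the seeder (connection string/database are examples):
// dotnet run --project tools/SourceStateSeeder -- \
//   --connection-string "mongodb://localhost:27017" --database feedser --input msrc-seed.json
```

The seeder merges the listed document IDs into the source's `pendingDocuments`/`pendingMappings` cursor arrays and stamps `lastSeededAt`, so re-running it with the same file is idempotent apart from that timestamp.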