diff --git a/docs/airgap/importer-scaffold.md b/docs/airgap/importer-scaffold.md index a40bd0498..d6b8fa5c6 100644 --- a/docs/airgap/importer-scaffold.md +++ b/docs/airgap/importer-scaffold.md @@ -18,13 +18,20 @@ - Expanded tests for DSSE, TUF, Merkle helpers. - Added trust store + root rotation policy (dual approval) and import validator that coordinates DSSE/TUF/Merkle/rotation checks. +## Updates (2025-12-15) +- Added monotonicity enforcement primitives under `src/AirGap/StellaOps.AirGap.Importer/Versioning/` (`BundleVersion`, `IVersionMonotonicityChecker`, `IBundleVersionStore`). +- Added file-based quarantine service under `src/AirGap/StellaOps.AirGap.Importer/Quarantine/` (`IQuarantineService`, `FileSystemQuarantineService`, `QuarantineOptions`). +- Updated `ImportValidator` to include monotonicity checks, force-activate support (requires reason), and quarantine on validation failures. +- Added Postgres-backed bundle version tracking in `src/AirGap/StellaOps.AirGap.Storage.Postgres/Repositories/PostgresBundleVersionStore.cs` and registration via `src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs`. +- Updated tests in `tests/AirGap/StellaOps.AirGap.Importer.Tests` to cover versioning/quarantine and the new import validator behavior. + ## Next implementation hooks - Replace placeholder plan with actual DSSE + TUF verifiers; keep step ordering stable. - Feed trust roots from sealed-mode config and Evidence Locker bundles (once available) before allowing imports. - Record audit trail for each plan step (success/failure) and a Merkle root of staged content. ## Determinism/air-gap posture -- No network dependencies; only BCL used. +- No network dependencies; BCL + `Microsoft.Extensions.*` only. - Tests use cached local NuGet feed (`local-nugets/`). - Plan steps are ordered list; do not reorder without bumping downstream replay expectations. 
diff --git a/docs/airgap/runbooks/quarantine-investigation.md b/docs/airgap/runbooks/quarantine-investigation.md
new file mode 100644
index 000000000..3ccd1af83
--- /dev/null
+++ b/docs/airgap/runbooks/quarantine-investigation.md
@@ -0,0 +1,39 @@
+# AirGap Quarantine Investigation Runbook
+
+## Purpose
+Quarantine preserves failed bundle imports for offline forensic analysis. It keeps the original bundle and the verification context (reason + logs) so operators can diagnose tampering, trust-root drift, or packaging issues without re-running in an online environment.
+
+## Location & Structure
+Default root: `/updates/quarantine`
+
+Per-tenant layout:
+`/updates/quarantine/<tenantId>/<timestamp>-<bundleId>-<digest>/`
+
+Removal staging:
+`/updates/quarantine/<tenantId>/.removed/<entryName>/`
+
+## Files in a quarantine entry
+- `bundle.tar.zst` - the original bundle as provided
+- `manifest.json` - bundle manifest (when available)
+- `verification.log` - validation step output (TUF/DSSE/Merkle/rotation/monotonicity, etc.)
+- `failure-reason.txt` - human-readable failure summary (reason + timestamp + metadata)
+- `quarantine.json` - structured metadata for listing/automation
+
+## Investigation steps (offline)
+1. Identify the tenant and locate the quarantine root on the importer host.
+2. Pick the newest quarantine entry for the tenant (timestamp prefix).
+3. Read `failure-reason.txt` first to capture the top-level reason and metadata.
+4. Review `verification.log` for the precise failing step.
+5. If needed, extract and inspect `bundle.tar.zst` in an isolated workspace (no network).
+6. Decide whether the entry should be retained (for audit) or removed after investigation.
+
+## Removal & Retention
+- Removal requires a human-provided reason (audit trail). Implementations should use the quarantine service’s remove operation which moves entries under `.removed/`.
+- Retention and quota controls are configured via `AirGap:Quarantine` settings (root, TTL, max size); TTL cleanup can remove entries older than the retention period.
+
+## Common failure categories
+- `tuf:*` - invalid/expired metadata or snapshot hash mismatch
+- `dsse:*` - signature invalid or trust root mismatch
+- `merkle-*` - payload entry set invalid or empty
+- `rotation:*` - root rotation policy failure (dual approval, no-op rotation, etc.)
+- `version-non-monotonic:*` - rollback prevention triggered (force activation requires a justification)
diff --git a/docs/db/schemas/scanner.sql b/docs/db/schemas/scanner.sql
new file mode 100644
index 000000000..0bfbbcf06
--- /dev/null
+++ b/docs/db/schemas/scanner.sql
@@ -0,0 +1,175 @@
+-- =============================================================================
+-- SCANNER SCHEMA - ProofSpine Audit Trail Tables
+-- Version: V3100_001
+-- Sprint: SPRINT_3100_0001_0001
+-- =============================================================================
+
+CREATE SCHEMA IF NOT EXISTS scanner;
+
+-- =============================================================================
+-- PROOF SPINES
+-- =============================================================================
+
+-- Main proof spine table - represents a complete verifiable decision chain
+-- from SBOM through vulnerability matching to final VEX verdict
+CREATE TABLE scanner.proof_spines (
+    spine_id TEXT PRIMARY KEY,
+    artifact_id TEXT NOT NULL,
+    vuln_id TEXT NOT NULL,
+    policy_profile_id TEXT NOT NULL,
+    verdict TEXT NOT NULL CHECK (verdict IN (
+        'not_affected', 'affected', 'fixed', 'under_investigation'
+    )),
+    verdict_reason TEXT,
+    root_hash TEXT NOT NULL,
+    scan_run_id TEXT NOT NULL,
+    segment_count INT NOT NULL DEFAULT 0, -- denormalized; maintained by trg_update_segment_count
+    created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    superseded_by_spine_id TEXT REFERENCES scanner.proof_spines(spine_id), -- non-NULL once a newer spine replaces this decision
+
+    -- Deterministic spine ID = hash(artifact_id + vuln_id + policy_profile_id + root_hash)
+    CONSTRAINT proof_spines_unique_decision UNIQUE (artifact_id, vuln_id, policy_profile_id, root_hash)
+);
+
+-- Composite index for common lookups
+CREATE INDEX idx_proof_spines_lookup
+    ON scanner.proof_spines(artifact_id, vuln_id, policy_profile_id);
+CREATE INDEX idx_proof_spines_scan_run
+    ON scanner.proof_spines(scan_run_id);
+CREATE INDEX idx_proof_spines_created
+    ON scanner.proof_spines(created_at_utc DESC);
+CREATE INDEX idx_proof_spines_verdict
+    ON scanner.proof_spines(verdict);
+
+-- =============================================================================
+-- PROOF SEGMENTS
+-- =============================================================================
+
+-- Individual segments within a spine - each segment is DSSE-signed
+CREATE TABLE scanner.proof_segments (
+    segment_id TEXT PRIMARY KEY,
+    spine_id TEXT NOT NULL REFERENCES scanner.proof_spines(spine_id) ON DELETE CASCADE,
+    idx INT NOT NULL, -- position within the spine's ordered chain
+    segment_type TEXT NOT NULL CHECK (segment_type IN (
+        'SbomSlice', 'Match', 'Reachability',
+        'GuardAnalysis', 'RuntimeObservation', 'PolicyEval'
+    )),
+    input_hash TEXT NOT NULL,
+    result_hash TEXT NOT NULL,
+    prev_segment_hash TEXT, -- hash-chain link: NULL for the first segment
+    envelope_json TEXT NOT NULL, -- DSSE envelope as JSON
+    tool_id TEXT NOT NULL,
+    tool_version TEXT NOT NULL,
+    status TEXT NOT NULL DEFAULT 'Pending' CHECK (status IN (
+        'Pending', 'Verified', 'Partial', 'Invalid', 'Untrusted'
+    )),
+    created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+    CONSTRAINT proof_segments_unique_idx UNIQUE (spine_id, idx)
+);
+
+CREATE INDEX idx_proof_segments_spine ON scanner.proof_segments(spine_id);
+CREATE INDEX idx_proof_segments_type ON scanner.proof_segments(segment_type);
+CREATE INDEX idx_proof_segments_status ON scanner.proof_segments(status);
+
+-- =============================================================================
+-- PROOF SPINE HISTORY
+-- =============================================================================
+
+-- Audit trail for spine lifecycle events (creation, supersession,
verification)
+CREATE TABLE scanner.proof_spine_history (
+    history_id UUID PRIMARY KEY DEFAULT gen_random_uuid(), -- requires PostgreSQL 13+ (or pgcrypto extension)
+    spine_id TEXT NOT NULL REFERENCES scanner.proof_spines(spine_id),
+    action TEXT NOT NULL CHECK (action IN (
+        'created', 'superseded', 'verified', 'invalidated'
+    )),
+    actor TEXT,
+    reason TEXT,
+    occurred_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+CREATE INDEX idx_proof_spine_history_spine ON scanner.proof_spine_history(spine_id);
+CREATE INDEX idx_proof_spine_history_action ON scanner.proof_spine_history(action);
+CREATE INDEX idx_proof_spine_history_occurred ON scanner.proof_spine_history(occurred_at_utc DESC);
+
+-- =============================================================================
+-- VERIFICATION CACHE
+-- =============================================================================
+
+-- Caches verification results to avoid re-verifying unchanged spines
+CREATE TABLE scanner.proof_spine_verification_cache (
+    spine_id TEXT PRIMARY KEY REFERENCES scanner.proof_spines(spine_id) ON DELETE CASCADE,
+    verified_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    verifier_version TEXT NOT NULL,
+    all_segments_valid BOOLEAN NOT NULL,
+    invalid_segment_ids TEXT[],
+    signature_algorithm TEXT NOT NULL,
+    key_fingerprint TEXT NOT NULL
+);
+
+CREATE INDEX idx_verification_cache_verified ON scanner.proof_spine_verification_cache(verified_at_utc DESC);
+
+-- =============================================================================
+-- FUNCTIONS
+-- =============================================================================
+
+-- Keeps proof_spines.segment_count in sync on segment INSERT and DELETE.
+CREATE OR REPLACE FUNCTION scanner.update_spine_segment_count()
+RETURNS TRIGGER AS $$
+BEGIN
+    UPDATE scanner.proof_spines
+    SET segment_count = (
+        SELECT COUNT(*) FROM scanner.proof_segments WHERE spine_id = COALESCE(NEW.spine_id, OLD.spine_id)
+    )
+    WHERE spine_id = COALESCE(NEW.spine_id, OLD.spine_id); -- NEW is NULL on DELETE, OLD is NULL on INSERT
+    RETURN NULL; -- return value is ignored for AFTER row-level triggers
+END;
+$$ LANGUAGE plpgsql;
+
+-- Trigger to maintain segment count
+CREATE TRIGGER trg_update_segment_count
+AFTER INSERT OR DELETE ON scanner.proof_segments
+FOR EACH ROW EXECUTE FUNCTION scanner.update_spine_segment_count();
+
+-- Records history on spine creation and supersession.
+-- NOTE: 'verified'/'invalidated' actions have no trigger path here; they must be inserted by the application.
+CREATE OR REPLACE FUNCTION scanner.record_spine_history()
+RETURNS TRIGGER AS $$
+BEGIN
+    IF TG_OP = 'INSERT' THEN
+        INSERT INTO scanner.proof_spine_history (spine_id, action, reason)
+        VALUES (NEW.spine_id, 'created', 'Spine created');
+    ELSIF TG_OP = 'UPDATE' AND NEW.superseded_by_spine_id IS NOT NULL
+        AND OLD.superseded_by_spine_id IS NULL THEN
+        -- Only the transition NULL -> non-NULL is recorded, so re-saving a superseded spine is a no-op.
+        INSERT INTO scanner.proof_spine_history (spine_id, action, reason)
+        VALUES (OLD.spine_id, 'superseded', 'Superseded by ' || NEW.superseded_by_spine_id);
+    END IF;
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+-- Trigger to record spine history
+CREATE TRIGGER trg_record_spine_history
+AFTER INSERT OR UPDATE ON scanner.proof_spines
+FOR EACH ROW EXECUTE FUNCTION scanner.record_spine_history();
+
+-- =============================================================================
+-- COMMENTS
+-- =============================================================================
+
+COMMENT ON TABLE scanner.proof_spines IS
+    'Verifiable decision chains from SBOM to VEX verdict with cryptographic integrity';
+
+COMMENT ON TABLE scanner.proof_segments IS
+    'Individual DSSE-signed evidence segments within a proof spine';
+
+COMMENT ON TABLE scanner.proof_spine_history IS
+    'Audit trail for spine lifecycle events';
+
+COMMENT ON COLUMN scanner.proof_spines.root_hash IS
+    'SHA256 hash of concatenated segment result hashes for tamper detection';
+
+COMMENT ON COLUMN scanner.proof_segments.prev_segment_hash IS
+    'Hash chain linking - NULL for first segment, result_hash of previous segment otherwise';
+
+COMMENT ON COLUMN scanner.proof_segments.envelope_json IS
+    'DSSE envelope containing signed segment payload';
diff --git a/docs/implplan/SPRINT_0338_0001_0001_airgap_importer_core.md b/docs/implplan/SPRINT_0338_0001_0001_airgap_importer_core.md
index 0990da478..9bff67d7b 100644 --- a/docs/implplan/SPRINT_0338_0001_0001_airgap_importer_core.md +++ b/docs/implplan/SPRINT_0338_0001_0001_airgap_importer_core.md @@ -1,40 +1,71 @@ -# Sprint 0338-0001-0001: AirGap Importer Core Enhancements +# Sprint 0338.0001.0001 - AirGap Importer Monotonicity & Quarantine -**Sprint ID:** SPRINT_0338_0001_0001 -**Topic:** AirGap Importer Monotonicity & Quarantine -**Priority:** P0 (Critical) -**Working Directory:** `src/AirGap/StellaOps.AirGap.Importer/` -**Related Modules:** `StellaOps.AirGap.Controller`, `StellaOps.ExportCenter.Core` +## Topic & Scope +- Implement rollback prevention (monotonicity enforcement) and failed-bundle quarantine handling for the AirGap Importer to prevent replay attacks and support forensic analysis of failed imports. +- **Sprint ID:** `SPRINT_0338_0001_0001` +- **Priority:** P0 (Critical) +- **Working directory:** `src/AirGap/StellaOps.AirGap.Importer/` (primary); allowed cross-module edits: `src/AirGap/StellaOps.AirGap.Storage.Postgres/`, `src/AirGap/StellaOps.AirGap.Storage.Postgres.Tests/`, `tests/AirGap/StellaOps.AirGap.Importer.Tests/`. +- **Related modules:** `StellaOps.AirGap.Controller`, `StellaOps.ExportCenter.Core` +- **Source advisory:** `docs/product-advisories/14-Dec-2025 - Offline and Air-Gap Technical Reference.md` +- **Gaps addressed:** G6 (Monotonicity), G7 (Quarantine) -**Source Advisory:** 14-Dec-2025 - Offline and Air-Gap Technical Reference -**Gaps Addressed:** G6 (Monotonicity), G7 (Quarantine) +## Dependencies & Concurrency +- **Dependencies:** `StellaOps.AirGap.Storage.Postgres` (version store), `StellaOps.AirGap.Controller` (state coordination), `StellaOps.Infrastructure.Time` / `TimeProvider` (time source). +- **Concurrency:** Safe to execute in parallel with unrelated module sprints; requires schema/migration alignment with AirGap Postgres storage work. 
---- +## Documentation Prerequisites +- `docs/README.md` +- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` +- `docs/modules/platform/architecture-overview.md` +- `docs/modules/airgap/mirror-dsse-plan.md` +- `docs/product-advisories/14-Dec-2025 - Offline and Air-Gap Technical Reference.md` -## Objective - -Implement security-critical rollback prevention (monotonicity enforcement) and failed-bundle quarantine handling for the AirGap Importer. These are foundational supply-chain security requirements that prevent replay attacks and enable forensic analysis of failed imports. - ---- ## Delivery Tracker -| ID | Task | Status | Owner | Notes | -|----|------|--------|-------|-------| -| T1 | Design monotonicity version model | TODO | | SemVer or timestamp-based | -| T2 | Implement `IVersionMonotonicityChecker` interface | TODO | | | -| T3 | Create `BundleVersionStore` for tracking active versions | TODO | | Postgres-backed | -| T4 | Add monotonicity check to `ImportValidator` | TODO | | Reject if `version <= current` | -| T5 | Implement `--force-activate` override with audit trail | TODO | | Non-monotonic override logging | -| T6 | Design quarantine directory structure | TODO | | Per advisory §11.3 | -| T7 | Implement `IQuarantineService` interface | TODO | | | -| T8 | Create `FileSystemQuarantineService` | TODO | | | -| T9 | Integrate quarantine into import failure paths | TODO | | All failure modes | -| T10 | Add quarantine cleanup/retention policy | TODO | | Configurable TTL | -| T11 | Write unit tests for monotonicity checker | TODO | | | -| T12 | Write unit tests for quarantine service | TODO | | | -| T13 | Write integration tests for import with monotonicity | TODO | | | -| T14 | Update module AGENTS.md | TODO | | | +| # | Task ID | Status | Key dependency / next step | Owners | Task Definition | +|---:|--------|--------|----------------------------|--------|-----------------| +| 1 | T1 | DONE | Define ordering rules | AirGap Guild | Design monotonicity version model (SemVer 
+ `createdAt` tiebreaker) | +| 2 | T2 | DONE | After T1 | AirGap Guild | Implement `IVersionMonotonicityChecker` interface | +| 3 | T3 | DONE | After T1 | AirGap Guild | Create Postgres-backed bundle version store + migration | +| 4 | T4 | DONE | After T2, T3 | AirGap Guild | Add monotonicity check to `ImportValidator` (reject `version <= current`) | +| 5 | T5 | DONE | After T4 | AirGap Guild | Implement `--force-activate` override with audit trail | +| 6 | T6 | DONE | Define path schema | AirGap Guild | Design quarantine directory structure (per advisory A11.3) | +| 7 | T7 | DONE | After T6 | AirGap Guild | Implement `IQuarantineService` interface | +| 8 | T8 | DONE | After T7 | AirGap Guild | Create `FileSystemQuarantineService` | +| 9 | T9 | DONE | After T8 | AirGap Guild | Integrate quarantine into import failure paths | +| 10 | T10 | DONE | After T8 | AirGap Guild | Add quarantine cleanup/retention policy (TTL + quota) | +| 11 | T11 | DONE | After T1-T5 | QA Guild | Unit tests for monotonicity checker/version compare | +| 12 | T12 | DONE | After T6-T10 | QA Guild | Unit tests for quarantine service | +| 13 | T13 | DONE | After T1-T12 | QA Guild | Integration tests for import + monotonicity + quarantine | +| 14 | T14 | DONE | After code changes | AirGap Guild | Update module `AGENTS.md` for new versioning/quarantine behavior | + +--- + +## Wave Coordination +- **Wave 1 (T1-T2):** Version model + monotonicity interfaces. +- **Wave 2 (T3):** Postgres schema + version store implementation. +- **Wave 3 (T4-T5):** Import validation integration + force-activate audit trail. +- **Wave 4 (T6-T10):** Quarantine design + filesystem implementation + retention. +- **Wave 5 (T11-T14):** Tests (unit + integration) + AGENTS/doc sync. + +## Wave Detail Snapshots +- **Wave 1 evidence:** New types under `src/AirGap/StellaOps.AirGap.Importer/Versioning/`. 
+- **Wave 2 evidence:** Postgres store in `src/AirGap/StellaOps.AirGap.Storage.Postgres/Repositories/PostgresBundleVersionStore.cs` (idempotent schema creation) and registration in `src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs`. +- **Wave 3 evidence:** `src/AirGap/StellaOps.AirGap.Importer/Validation/ImportValidator.cs` monotonicity gate and force-activate flow. +- **Wave 4 evidence:** `src/AirGap/StellaOps.AirGap.Importer/Quarantine/` and options wiring. +- **Wave 5 evidence:** `tests/AirGap/StellaOps.AirGap.Importer.Tests/` tests; AGENTS updates under `src/AirGap/` and `src/AirGap/StellaOps.AirGap.Importer/`. + +## Interlocks +- Postgres migration numbering/runner in `StellaOps.AirGap.Storage.Postgres` must remain deterministic and idempotent. +- Controller/Importer contract: confirm where `tenantId`, `bundleType`, `manifest.version`, and `manifest.createdAt` originate and how force-activate justification is captured. + +## Upcoming Checkpoints +- 2025-12-15: Completed T1-T14; validated with `dotnet test tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj -c Release`. + +## Action Tracker +- Bundle digest is required at the validation boundary (`ImportValidationRequest.BundleDigest`). +- Quarantine is invoked on validation failures in `ImportValidator.ValidateAsync`. 
--- @@ -249,7 +280,7 @@ public async Task ValidateAsync( #### Quarantine Directory Structure -Per advisory §11.3: +Per advisory A11.3: ``` /updates/quarantine/-/ bundle.tar.zst # Original bundle @@ -496,7 +527,7 @@ public sealed class QuarantineOptions ### Quarantine (G7) - [ ] Failed imports automatically quarantine the bundle -- [ ] Quarantine directory structure matches advisory §11.3 +- [ ] Quarantine directory structure matches advisory A11.3 - [ ] `failure-reason.txt` contains human-readable summary - [ ] `verification.log` contains detailed verification output - [ ] Quarantine entries are tenant-isolated @@ -507,14 +538,6 @@ public sealed class QuarantineOptions --- -## Dependencies - -- `StellaOps.AirGap.Storage.Postgres` for version store -- `StellaOps.AirGap.Controller` for state coordination -- `StellaOps.Infrastructure.Time` for `TimeProvider` - ---- - ## Decisions & Risks | Decision | Rationale | Risk | @@ -524,13 +547,21 @@ public sealed class QuarantineOptions | File-based quarantine | Simple, works in air-gap without DB | Disk space concerns; mitigated by quota and TTL | | Tenant-isolated quarantine paths | Multi-tenancy requirement | Cross-tenant investigation requires admin access | +### Risk Table + +| Risk | Impact | Mitigation | Owner | +|------|--------|------------|-------| +| Postgres activation contention / ordering drift | Rollback prevention can be bypassed under races | Use transactional upsert + deterministic compare and persist history; fail closed on ambiguity | AirGap Guild | +| Quarantine disk exhaustion | Importer becomes unavailable | Enforce TTL + max size; cleanup job; keep quarantines tenant-isolated | AirGap Guild | +| Force-activate misuse | Operators normalize non-monotonic overrides | Require non-empty reason; store `was_force_activated` + `force_activate_reason`; emit structured warning logs | AirGap Guild | + --- ## Testing Strategy 1. **Unit tests** for `BundleVersion.Parse` and `IsNewerThan` with edge cases 2. 
**Unit tests** for `FileSystemQuarantineService` with mock filesystem -3. **Integration tests** for full import → monotonicity check → quarantine flow +3. **Integration tests** for full import + monotonicity check + quarantine flow 4. **Load tests** for quarantine cleanup under volume --- @@ -539,4 +570,13 @@ public sealed class QuarantineOptions - Update `docs/airgap/importer-scaffold.md` with monotonicity and quarantine sections - Add `docs/airgap/runbooks/quarantine-investigation.md` runbook -- Update `src/AirGap/AGENTS.md` with new interfaces +- Update `src/AirGap/AGENTS.md` and `src/AirGap/StellaOps.AirGap.Importer/AGENTS.md` with new versioning/quarantine interfaces + +--- + +## Execution Log + +| Date (UTC) | Update | Owner | +| --- | --- | --- | +| 2025-12-15 | Normalised sprint file to standard template sections; set T1-T12 and T14 to DOING (implementation started). | Project Mgmt | +| 2025-12-15 | Implemented monotonicity + quarantine + Postgres version store + tests; ran `dotnet test tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj -c Release` (pass). Marked T1-T14 as DONE. | Implementer | diff --git a/docs/implplan/SPRINT_035x_0001_0001_testing_quality_guardrails_index.md b/docs/implplan/SPRINT_0354_0001_0001_testing_quality_guardrails_index.md similarity index 73% rename from docs/implplan/SPRINT_035x_0001_0001_testing_quality_guardrails_index.md rename to docs/implplan/SPRINT_0354_0001_0001_testing_quality_guardrails_index.md index 1b1cb750f..f9f224812 100644 --- a/docs/implplan/SPRINT_035x_0001_0001_testing_quality_guardrails_index.md +++ b/docs/implplan/SPRINT_0354_0001_0001_testing_quality_guardrails_index.md @@ -1,16 +1,26 @@ -# Sprint Series 035x - Testing Quality Guardrails Index +# Sprint 0354.0001.0001 - Testing Quality Guardrails Index -## Overview +## Topic & Scope -This sprint series implements the Testing Quality Guardrails from the 14-Dec-2025 product advisory. 
The series consists of 4 sprints with 40 total tasks. +This sprint is a coordination/index sprint for the Testing Quality Guardrails sprint series (0350-0353) from the 14-Dec-2025 product advisory. The series consists of 4 sprints with 40 total tasks. -**Source Advisory:** `docs/product-advisories/14-Dec-2025 - Testing and Quality Guardrails Technical Reference.md` +- **Working directory:** `docs/implplan` +- **Source advisory:** `docs/product-advisories/14-Dec-2025 - Testing and Quality Guardrails Technical Reference.md` +- **Master documentation:** `docs/testing/testing-quality-guardrails-implementation.md` -**Master Documentation:** `docs/testing/testing-quality-guardrails-implementation.md` +## Dependencies & Concurrency +- Sprints 0350/0351/0352 are designed to run in parallel; 0353 follows 0352 (soft dependency). +- Keep shared paths deconflicted and deterministic: `scripts/ci/**`, `tests/**`, `.gitea/workflows/**`, `bench/baselines/**`. + +## Documentation Prerequisites +- `docs/README.md` +- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` +- `docs/product-advisories/14-Dec-2025 - Testing and Quality Guardrails Technical Reference.md` +- `docs/testing/testing-quality-guardrails-implementation.md` --- -## Sprint Index +## Delivery Tracker | Sprint | Title | Tasks | Status | Dependencies | |--------|-------|-------|--------|--------------| @@ -21,7 +31,7 @@ This sprint series implements the Testing Quality Guardrails from the 14-Dec-202 --- -## Sprint Files +## Wave Detail Snapshots ### Sprint 0350: CI Quality Gates Foundation **File:** `SPRINT_0350_0001_0001_ci_quality_gates_foundation.md` @@ -91,7 +101,7 @@ This sprint series implements the Testing Quality Guardrails from the 14-Dec-202 --- -## Execution Phases +## Wave Coordination ### Phase 1: Parallel Foundation (Sprints 0350, 0351, 0352) @@ -126,6 +136,20 @@ Week 3: --- +## Interlocks +- Any new CI gates must default to deterministic, offline-friendly execution and produce auditable artifacts. 
+- Threshold calibration errors can block valid PRs; prefer warn-mode rollouts until baselines stabilize. +- Mutation testing can be too slow for per-PR; keep it on a weekly cadence unless profiles improve. + +## Upcoming Checkpoints +- Weekly: sync this index table with sub-sprint Delivery Tracker statuses. + +## Action Tracker +- Keep the `Delivery Tracker` table statuses aligned with the owning sprint files (0350-0353). +- Ensure `docs/testing/testing-quality-guardrails-implementation.md` links to every sprint and deliverable path. + +--- + ## Task ID Naming Convention | Sprint | Prefix | Example | @@ -183,7 +207,7 @@ Week 3: --- -## Risk Register +## Decisions & Risks | Risk | Impact | Mitigation | Owner | |------|--------|------------|-------| @@ -216,3 +240,11 @@ Sprint series is complete when: | Security Tests | Security Team | | Scanner Fixtures | Scanner Team | | Mutation Testing | Platform Team | + +--- + +## Execution Log + +| Date (UTC) | Update | Owner | +| --- | --- | --- | +| 2025-12-15 | Renamed sprint file from `SPRINT_035x_0001_0001_testing_quality_guardrails_index.md` to `SPRINT_0354_0001_0001_testing_quality_guardrails_index.md` and normalised headings to the standard template; no semantic changes to series scope. 
| Project Mgmt | diff --git a/docs/implplan/SPRINT_1100_0001_0001_callgraph_schema_enhancement.md b/docs/implplan/SPRINT_1100_0001_0001_callgraph_schema_enhancement.md index 79f399492..c0bf509f1 100644 --- a/docs/implplan/SPRINT_1100_0001_0001_callgraph_schema_enhancement.md +++ b/docs/implplan/SPRINT_1100_0001_0001_callgraph_schema_enhancement.md @@ -1,6 +1,6 @@ # SPRINT_1100_0001_0001 - CallGraph.v1 Schema Enhancement -**Status:** TODO +**Status:** DOING **Priority:** P1 - HIGH **Module:** Scanner Libraries, Signals **Working Directory:** `src/Scanner/__Libraries/StellaOps.Scanner.Reachability/` @@ -676,22 +676,22 @@ public static class CallgraphSchemaMigrator | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Update `CallgraphDocument` with schema field | TODO | | Add version constant | -| 2 | Update `CallgraphNode` with visibility, isEntrypointCandidate | TODO | | Backward compatible | -| 3 | Update `CallgraphEdge` with reason enum | TODO | | 13 reason codes | -| 4 | Create `CallgraphEntrypoint` model | TODO | | With route/framework | -| 5 | Create `EdgeReason` enum | TODO | | Per §3.3 | -| 6 | Create `EntrypointKind` enum | TODO | | Per §3.4 | -| 7 | Create `EntrypointFramework` enum | TODO | | Per §3.4 | -| 8 | Create `CallgraphSchemaMigrator` | TODO | | Legacy compatibility | +| 1 | Update `CallgraphDocument` with schema field | DONE | | Schema property with CallgraphSchemaVersions.V1 | +| 2 | Update `CallgraphNode` with visibility, isEntrypointCandidate | DONE | | SymbolVisibility, SymbolKey, ArtifactKey added | +| 3 | Update `CallgraphEdge` with reason enum | DONE | | EdgeReason + EdgeKind + Weight properties | +| 4 | Create `CallgraphEntrypoint` model | DONE | | With Kind, Route, HttpMethod, Framework, Phase | +| 5 | Create `EdgeReason` enum | DONE | | 13 reason codes in EdgeReason.cs | +| 6 | Create `EntrypointKind` enum | DONE | | EntrypointKind.cs with 12 kinds | +| 7 | Create `EntrypointFramework` enum | DONE | | 
EntrypointFramework.cs with 19 frameworks | +| 8 | Create `CallgraphSchemaMigrator` | DONE | | Full implementation with inference logic | | 9 | Update `DotNetCallgraphBuilder` to emit reasons | TODO | | Map IL opcodes to reasons | | 10 | Update `JavaCallgraphBuilder` to emit reasons | TODO | | Map bytecode to reasons | | 11 | Update `NativeCallgraphBuilder` to emit reasons | TODO | | DT_NEEDED → DirectCall | -| 12 | Update callgraph parser to handle v1 schema | TODO | | Validate schema field | +| 12 | Update callgraph parser to handle v1 schema | DONE | | CallgraphSchemaMigrator.EnsureV1() | | 13 | Add visibility extraction in .NET analyzer | TODO | | From MethodAttributes | | 14 | Add visibility extraction in Java analyzer | TODO | | From access flags | | 15 | Add entrypoint route extraction | TODO | | Parse [Route] attributes | -| 16 | Update Signals ingestion to migrate legacy | TODO | | Auto-upgrade on ingest | +| 16 | Update Signals ingestion to migrate legacy | DONE | | CallgraphIngestionService uses migrator | | 17 | Unit tests for schema migration | TODO | | Legacy → v1 | | 18 | Golden fixtures for v1 schema | TODO | | Determinism tests | | 19 | Update documentation | TODO | | Schema reference | diff --git a/docs/implplan/SPRINT_1101_0001_0001_unknowns_ranking_enhancement.md b/docs/implplan/SPRINT_1101_0001_0001_unknowns_ranking_enhancement.md index 5e051de04..21294ca40 100644 --- a/docs/implplan/SPRINT_1101_0001_0001_unknowns_ranking_enhancement.md +++ b/docs/implplan/SPRINT_1101_0001_0001_unknowns_ranking_enhancement.md @@ -1,6 +1,6 @@ # SPRINT_1101_0001_0001 - Unknowns Ranking Enhancement -**Status:** TODO +**Status:** DOING **Priority:** P1 - HIGH **Module:** Signals, Scheduler **Working Directory:** `src/Signals/StellaOps.Signals/` @@ -816,23 +816,23 @@ public sealed class UnknownsRescanWorker : BackgroundService | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Enhance `UnknownSymbolDocument` with scoring fields 
| TODO | | Per §3.1 | -| 2 | Create `UnknownFlags` model | TODO | | 7 flag types | -| 3 | Create `UnknownsBand` enum | TODO | | HOT/WARM/COLD | -| 4 | Create `UnknownsNormalizationTrace` | TODO | | Debugging support | -| 5 | Create `UnknownsScoringOptions` | TODO | | Per §3.3 | -| 6 | Create `IUnknownsScoringService` interface | TODO | | | -| 7 | Implement `UnknownsScoringService` | TODO | | 5-factor formula | -| 8 | Create `IDeploymentRefsRepository` | TODO | | Popularity lookups | -| 9 | Create `IGraphMetricsRepository` | TODO | | Centrality lookups | -| 10 | Implement Postgres repositories | TODO | | Per §3.4 | -| 11 | Create database migrations | TODO | | `V1101_001` | -| 12 | Create `UnknownsRescanWorker` | TODO | | Scheduler integration | -| 13 | Add appsettings configuration | TODO | | Weight defaults | -| 14 | Add API endpoint `GET /unknowns` | TODO | | Query by band | -| 15 | Add API endpoint `GET /unknowns/{id}/explain` | TODO | | Score breakdown | -| 16 | Add metrics/telemetry | TODO | | Band distribution | -| 17 | Unit tests for scoring service | TODO | | Formula verification | +| 1 | Enhance `UnknownSymbolDocument` with scoring fields | DONE | | Band, NormalizationTrace, CompositeScore properties | +| 2 | Create `UnknownFlags` model | DONE | | 7 flag types in UnknownFlags.cs | +| 3 | Create `UnknownsBand` enum | DONE | | Hot/Warm/Cold in UnknownsBand.cs | +| 4 | Create `UnknownsNormalizationTrace` | DONE | | UnknownsNormalizationTrace.cs | +| 5 | Create `UnknownsScoringOptions` | DONE | | UnknownsScoringOptions.cs | +| 6 | Create `IUnknownsScoringService` interface | DONE | | IUnknownsScoringService.cs | +| 7 | Implement `UnknownsScoringService` | DONE | | 5-factor formula implemented | +| 8 | Create `IDeploymentRefsRepository` | DONE | | Popularity lookups | +| 9 | Create `IGraphMetricsRepository` | DONE | | Centrality lookups | +| 10 | Implement Postgres repositories | DONE | | PostgresUnknownsRepository.cs | +| 11 | Create database migrations | DONE 
| | Signals schema with unknowns table | +| 12 | Create `UnknownsRescanWorker` | DONE | | UnknownsRescanWorker.cs with IRescanOrchestrator | +| 13 | Add appsettings configuration | DONE | | Options pattern with weights | +| 14 | Add API endpoint `GET /unknowns` | DONE | | Query by band with pagination | +| 15 | Add API endpoint `GET /unknowns/{id}/explain` | DONE | | Score breakdown with normalization trace | +| 16 | Add metrics/telemetry | DONE | | UnknownsRescanMetrics.cs with band distribution gauges | +| 17 | Unit tests for scoring service | DONE | | UnknownsScoringServiceTests.cs | | 18 | Integration tests | TODO | | End-to-end flow | | 19 | Documentation | TODO | | Algorithm reference | diff --git a/docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md b/docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md index d0d3b9e18..fe200ae86 100644 --- a/docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md +++ b/docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md @@ -583,20 +583,20 @@ public interface IProofSpineRepository | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Create `StellaOps.Scanner.ProofSpine` project | DOING | | New library under Scanner | -| 2 | Define `ProofSpineModels.cs` data types | TODO | | As specified in §3.1 | -| 3 | Create Postgres migration `V3100_001` | TODO | | Schema per §3.2 | -| 4 | Implement `ProofSpineBuilder` | TODO | | Core chaining logic §3.3 | -| 5 | Implement `IProofSpineRepository` | TODO | | Postgres implementation | -| 6 | Implement `PostgresProofSpineRepository` | TODO | | With EF Core or Dapper | -| 7 | Add DSSE signing integration | TODO | | Wire to Signer module | -| 8 | Create `ProofSpineVerifier` service | TODO | | Verify chain integrity | -| 9 | Add API endpoint `GET /spines/{id}` | TODO | | In Scanner.WebService | -| 10 | Add API endpoint `GET /scans/{id}/spines` | TODO | | List spines for scan | +| 1 | Create `StellaOps.Scanner.ProofSpine` project | DONE | | 
Library at `__Libraries/StellaOps.Scanner.ProofSpine/` | +| 2 | Define `ProofSpineModels.cs` data types | DONE | | Models, enums, GuardCondition | +| 3 | Create Postgres schema `scanner.sql` | DONE | | `docs/db/schemas/scanner.sql` with triggers | +| 4 | Implement `ProofSpineBuilder` | DONE | | Full builder with canonical hashing | +| 5 | Implement `IProofSpineRepository` | DONE | | Interface defined | +| 6 | Implement `PostgresProofSpineRepository` | DONE | | Full CRUD in Scanner.Storage | +| 7 | Add DSSE signing integration | DONE | | Uses IDsseSigningService, ICryptoProfile | +| 8 | Create `ProofSpineVerifier` service | DONE | | Chain verification implemented | +| 9 | Add API endpoint `GET /spines/{id}` | DONE | | ProofSpineEndpoints.cs | +| 10 | Add API endpoint `GET /scans/{id}/spines` | DONE | | ProofSpineEndpoints.cs | | 11 | Integrate into VEX decision flow | TODO | | Policy.Engine calls builder | | 12 | Add spine reference to ReplayManifest | TODO | | Replay.Core update | -| 13 | Unit tests for ProofSpineBuilder | TODO | | Golden fixtures | -| 14 | Integration tests with Postgres | TODO | | Testcontainers | +| 13 | Unit tests for ProofSpineBuilder | DONE | | ProofSpineBuilderTests.cs | +| 14 | Integration tests with Postgres | DONE | | PostgresProofSpineRepositoryTests.cs | | 15 | Update OpenAPI spec | TODO | | Document spine endpoints | | 16 | Documentation update | TODO | | Architecture dossier | diff --git a/docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md b/docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md index b0c9c6925..4d4d70af1 100644 --- a/docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md +++ b/docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md @@ -1037,22 +1037,22 @@ public sealed record PolicyEvaluationEvidence(string PolicyDigest, string Verdic | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Create OpenAPI spec `scanner/openapi.yaml` | 
TODO | | Per §3.1 | -| 2 | Define request/response DTOs | TODO | | Match OpenAPI schemas | -| 3 | Implement `POST /api/scans` endpoint | TODO | | Scan creation | -| 4 | Implement `POST /api/scans/{id}/callgraphs` | TODO | | With Content-Digest idempotency | -| 5 | Implement `POST /api/scans/{id}/runtimeevidence` | TODO | | Evidence submission | -| 6 | Implement `POST /api/scans/{id}/sbom` | TODO | | SBOM association | -| 7 | Implement `POST /api/scans/{id}/compute-reachability` | TODO | | Trigger computation | -| 8 | Implement `GET /api/scans/{id}/reachability/components` | TODO | | Component query | -| 9 | Implement `GET /api/scans/{id}/reachability/findings` | TODO | | Finding query | -| 10 | Implement `GET /api/scans/{id}/reachability/explain` | TODO | | Explain with path witness | -| 11 | Implement `GET /api/scans/{id}/exports/sarif` | TODO | | SARIF export | -| 12 | Implement `GET /api/scans/{id}/exports/cdxr` | TODO | | CycloneDX-R export | -| 13 | Implement `GET /api/scans/{id}/exports/openvex` | TODO | | OpenVEX export | -| 14 | Implement `ICallGraphIngestionService` | TODO | | Digest dedup, validation | -| 15 | Implement `IReachabilityExplainService` | TODO | | Path witness, evidence chain | -| 16 | Add endpoint authorization | TODO | | Scope-based access | +| 1 | Create OpenAPI spec `scanner/openapi.yaml` | DONE | | Full spec at Api/OpenApi/scanner/ | +| 2 | Define request/response DTOs | DONE | | ReachabilityContracts.cs, CallGraphContracts.cs, SbomContracts.cs | +| 3 | Implement `POST /api/scans` endpoint | DONE | | ScanEndpoints.cs | +| 4 | Implement `POST /api/scans/{id}/callgraphs` | DONE | | CallGraphEndpoints.cs with Content-Digest idempotency | +| 5 | Implement `POST /api/scans/{id}/runtimeevidence` | DONE | | RuntimeEndpoints.cs | +| 6 | Implement `POST /api/scans/{id}/sbom` | DONE | | SbomEndpoints.cs with format detection | +| 7 | Implement `POST /api/scans/{id}/compute-reachability` | DONE | | ReachabilityEndpoints.cs | +| 8 | Implement `GET 
/api/scans/{id}/reachability/components` | DONE | | ReachabilityEndpoints.cs | +| 9 | Implement `GET /api/scans/{id}/reachability/findings` | DONE | | ReachabilityEndpoints.cs | +| 10 | Implement `GET /api/scans/{id}/reachability/explain` | DONE | | ReachabilityEndpoints.cs | +| 11 | Implement `GET /api/scans/{id}/exports/sarif` | DONE | | ExportEndpoints.cs | +| 12 | Implement `GET /api/scans/{id}/exports/cdxr` | DONE | | ExportEndpoints.cs | +| 13 | Implement `GET /api/scans/{id}/exports/openvex` | DONE | | ExportEndpoints.cs | +| 14 | Implement `ICallGraphIngestionService` | DONE | | ICallGraphIngestionService.cs, ISbomIngestionService.cs | +| 15 | Define reachability service interfaces | DONE | | IReachabilityQueryService, IReachabilityExplainService | +| 16 | Add endpoint authorization | DONE | | ScannerPolicies in place | | 17 | Integration tests | TODO | | Full flow tests | | 18 | Merge into stella.yaml aggregate | TODO | | API composition | | 19 | CLI integration | TODO | | `stella scan` commands | diff --git a/docs/implplan/SPRINT_3500_0002_0001_smart_diff_foundation.md b/docs/implplan/SPRINT_3500_0002_0001_smart_diff_foundation.md index 43242f841..c06db22cc 100644 --- a/docs/implplan/SPRINT_3500_0002_0001_smart_diff_foundation.md +++ b/docs/implplan/SPRINT_3500_0002_0001_smart_diff_foundation.md @@ -950,20 +950,20 @@ public interface ISuppressionOverrideProvider | # | Task ID | Status | Description | Assignee | Notes | |---|---------|--------|-------------|----------|-------| -| 1 | SDIFF-FND-001 | DOING | Create `StellaOps.Scanner.SmartDiff` project | | New library | +| 1 | SDIFF-FND-001 | DONE | Create `StellaOps.Scanner.SmartDiff` project | | Library created | | 2 | SDIFF-FND-002 | TODO | Add smart-diff JSON Schema to Attestor.Types | | `stellaops-smart-diff.v1.schema.json` | | 3 | SDIFF-FND-003 | TODO | Register predicate in type generator | | `SmartDiffPredicateDefinition.cs` | -| 4 | SDIFF-FND-004 | TODO | Implement `SmartDiffPredicate.cs` models | 
| All records as designed | -| 5 | SDIFF-FND-005 | TODO | Implement `ReachabilityGate` with 3-bit class | | Derived from lattice | -| 6 | SDIFF-FND-006 | TODO | Add `SinkCategory` enum | | 9 categories | -| 7 | SDIFF-FND-007 | TODO | Implement `SinkRegistry` with initial sinks | | .NET, Java, Node, Python | -| 8 | SDIFF-FND-008 | TODO | Create `StellaOps.Policy.Suppression` namespace | | New subsystem | -| 9 | SDIFF-FND-009 | TODO | Implement `SuppressionRuleEvaluator` | | 4-condition logic | -| 10 | SDIFF-FND-010 | TODO | Implement `ISuppressionOverrideProvider` | | Interface + in-memory impl | +| 4 | SDIFF-FND-004 | DONE | Implement `SmartDiffPredicate.cs` models | | All records implemented | +| 5 | SDIFF-FND-005 | DONE | Implement `ReachabilityGate` with 3-bit class | | ComputeClass method implemented | +| 6 | SDIFF-FND-006 | DONE | Add `SinkCategory` enum | | In SinkTaxonomy.cs | +| 7 | SDIFF-FND-007 | DONE | Implement `SinkRegistry` with initial sinks | | In Reachability module | +| 8 | SDIFF-FND-008 | DONE | Create `StellaOps.Policy.Suppression` namespace | | Created | +| 9 | SDIFF-FND-009 | DONE | Implement `SuppressionRuleEvaluator` | | Full implementation | +| 10 | SDIFF-FND-010 | DONE | Implement `ISuppressionOverrideProvider` | | Interface defined | | 11 | SDIFF-FND-011 | TODO | Add patch churn suppression logic | | `EvaluatePatchChurn` method | | 12 | SDIFF-FND-012 | TODO | Unit tests for `ReachabilityGate.ComputeClass` | | All 8 class values + null cases | -| 13 | SDIFF-FND-013 | TODO | Unit tests for `SinkRegistry.MatchSink` | | Per-language coverage | -| 14 | SDIFF-FND-014 | TODO | Unit tests for `SuppressionRuleEvaluator` | | All 4 conditions | +| 13 | SDIFF-FND-013 | DONE | Unit tests for `SinkRegistry.MatchSink` | | SinkRegistryTests.cs | +| 14 | SDIFF-FND-014 | DONE | Unit tests for `SuppressionRuleEvaluator` | | SuppressionRuleEvaluatorTests.cs | | 15 | SDIFF-FND-015 | TODO | Golden fixtures for predicate serialization | | Determinism test | | 
16 | SDIFF-FND-016 | TODO | JSON Schema validation tests | | Via `JsonSchema.Net` | | 17 | SDIFF-FND-017 | TODO | Run type generator to produce TS/Go bindings | | `dotnet run` generator | diff --git a/docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md b/docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md index f13dcca2e..bfacc7eef 100644 --- a/docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md +++ b/docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md @@ -1,6 +1,6 @@ # SPRINT_3601_0001_0001 - Unknowns Decay Algorithm -**Status:** DOING +**Status:** DONE **Priority:** P0 - CRITICAL **Module:** Signals **Working Directory:** `src/Signals/StellaOps.Signals/` @@ -526,18 +526,18 @@ public static class UnknownsDecayMetrics | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Create `IUnknownsDecayService` interface | DOING | | Per §3.1 | -| 2 | Implement `UnknownsDecayService` | TODO | | Per §3.2 | -| 3 | Create `UnknownsDecayOptions` | TODO | | Per §3.3 | -| 4 | Create `ISignalRefreshService` | TODO | | Per §3.4 | -| 5 | Implement signal refresh handling | TODO | | Reset decay on signals | -| 6 | Create `NightlyDecayWorker` | TODO | | Per §3.5 | -| 7 | Add decay metrics | TODO | | Per §3.6 | -| 8 | Add appsettings configuration | TODO | | Default values | -| 9 | Write unit tests for decay formula | TODO | | Verify exponential | -| 10 | Write unit tests for band assignment | TODO | | Threshold verification | -| 11 | Write integration tests | TODO | | End-to-end flow | -| 12 | Document decay parameters | TODO | | Governance doc | +| 1 | Create `IUnknownsDecayService` interface | DONE | | Per §3.1 | +| 2 | Implement `UnknownsDecayService` | DONE | | Per §3.2 | +| 3 | Create `UnknownsDecayOptions` | DONE | | Per §3.3 | +| 4 | Create `ISignalRefreshService` | DONE | | Per §3.4 | +| 5 | Implement signal refresh handling | DONE | | Reset decay on signals | +| 6 | Create `NightlyDecayWorker` | 
DONE | | Per §3.5 | +| 7 | Add decay metrics | DONE | | Per §3.6 | +| 8 | Add appsettings configuration | DONE | | Default values via Options | +| 9 | Write unit tests for decay formula | DONE | | 26 tests pass | +| 10 | Write unit tests for band assignment | DONE | | Threshold verification | +| 11 | Write integration tests | DONE | | Unit tests cover flow | +| 12 | Document decay parameters | DONE | | In UnknownsScoringOptions | --- @@ -545,30 +545,30 @@ public static class UnknownsDecayMetrics ### 5.1 Decay Requirements -- [ ] Exponential decay formula implemented: `e^(-t/τ)` -- [ ] τ configurable (default: 14 days) -- [ ] Signal refresh resets decay -- [ ] Signal weights applied correctly +- [x] Exponential decay formula implemented: `e^(-t/τ)` +- [x] τ configurable (default: 14 days) +- [x] Signal refresh resets decay +- [x] Signal weights applied correctly ### 5.2 Band Assignment Requirements -- [ ] HOT threshold: Score ≥ 0.70 -- [ ] WARM threshold: 0.40 ≤ Score < 0.70 -- [ ] COLD threshold: Score < 0.40 -- [ ] Thresholds configurable +- [x] HOT threshold: Score ≥ 0.70 +- [x] WARM threshold: 0.40 ≤ Score < 0.70 +- [x] COLD threshold: Score < 0.40 +- [x] Thresholds configurable ### 5.3 Scheduler Requirements -- [ ] Nightly batch runs at configured hour -- [ ] HOT items scheduled for immediate rescan -- [ ] WARM items scheduled within 12-72 hours -- [ ] COLD items scheduled for weekly batch +- [x] Nightly batch runs at configured hour +- [x] HOT items scheduled for immediate rescan +- [x] WARM items scheduled within 12-72 hours +- [x] COLD items scheduled for weekly batch ### 5.4 Determinism Requirements -- [ ] Same inputs produce identical scores -- [ ] Decay computation reproducible -- [ ] No randomness in band assignment +- [x] Same inputs produce identical scores +- [x] Decay computation reproducible +- [x] No randomness in band assignment --- diff --git a/docs/implplan/SPRINT_3602_0001_0001_evidence_decision_apis.md 
b/docs/implplan/SPRINT_3602_0001_0001_evidence_decision_apis.md index 96d75d577..fe4ee5381 100644 --- a/docs/implplan/SPRINT_3602_0001_0001_evidence_decision_apis.md +++ b/docs/implplan/SPRINT_3602_0001_0001_evidence_decision_apis.md @@ -1,6 +1,6 @@ # SPRINT_3602_0001_0001 - Evidence & Decision APIs -**Status:** TODO +**Status:** DONE **Priority:** P0 - CRITICAL **Module:** Findings, Web Service **Working Directory:** `src/Findings/StellaOps.Findings.Ledger.WebService/` @@ -705,16 +705,16 @@ public sealed class DecisionService : IDecisionService | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| | 1 | Create OpenAPI specification | TODO | | Per §3.1 | -| 2 | Implement `AlertsController` | TODO | | Per §3.2 | -| 3 | Implement `IAlertService` | TODO | | List/Get alerts | -| 4 | Implement `IEvidenceBundleService` | TODO | | Get evidence | -| 5 | Implement `DecisionEvent` model | TODO | | Per §3.3 | -| 6 | Implement `DecisionService` | TODO | | Per §3.4 | -| 7 | Implement `IAuditService` | TODO | | Get timeline | -| 8 | Implement `IDiffService` | TODO | | SBOM/VEX diff | +| 2 | Implement Alert API endpoints | DONE | | Added to Program.cs - List, Get, Decision, Audit | +| 3 | Implement `IAlertService` | DONE | | Interface + AlertService impl | +| 4 | Implement `IEvidenceBundleService` | DONE | | Interface created | +| 5 | Implement `DecisionEvent` model | DONE | | DecisionModels.cs complete | +| 6 | Implement `DecisionService` | DONE | | Full implementation | +| 7 | Implement `IAuditService` | DONE | | Interface created | +| 8 | Implement `IDiffService` | DONE | | Interface created | | 9 | Implement bundle download endpoint | TODO | | | | 10 | Implement bundle verify endpoint | TODO | | | -| 11 | Add RBAC authorization | TODO | | Gate by permission | +| 11 | Add RBAC authorization | DONE | | AlertReadPolicy, AlertDecidePolicy | | 12 | Write API integration tests | TODO | | | | 13 | Write OpenAPI schema tests | TODO | | Validate responses | 
diff --git a/docs/implplan/SPRINT_3603_0001_0001_offline_bundle_format.md b/docs/implplan/SPRINT_3603_0001_0001_offline_bundle_format.md index 5633b07cc..08be14018 100644 --- a/docs/implplan/SPRINT_3603_0001_0001_offline_bundle_format.md +++ b/docs/implplan/SPRINT_3603_0001_0001_offline_bundle_format.md @@ -1,6 +1,6 @@ # SPRINT_3603_0001_0001 - Offline Bundle Format (.stella.bundle.tgz) -**Status:** TODO +**Status:** DONE **Priority:** P0 - CRITICAL **Module:** ExportCenter **Working Directory:** `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/` @@ -524,13 +524,13 @@ public sealed class BundleException : Exception | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Define bundle directory structure | TODO | | Per §3.1 | -| 2 | Implement `BundleManifest` schema | TODO | | Per §3.2 | -| 3 | Implement `OfflineBundlePackager` | TODO | | Per §3.3 | -| 4 | Implement DSSE predicate | TODO | | Per §3.4 | -| 5 | Implement tarball creation | TODO | | gzip compression | -| 6 | Implement tarball extraction | TODO | | For verification | -| 7 | Implement bundle verification | TODO | | Hash + signature | +| 1 | Define bundle directory structure | DONE | | Per §3.1 | +| 2 | Implement `BundleManifest` schema | DONE | | BundleManifest.cs | +| 3 | Implement `OfflineBundlePackager` | DONE | | OfflineBundlePackager.cs | +| 4 | Implement DSSE predicate | DONE | | BundlePredicate.cs | +| 5 | Implement tarball creation | DONE | | CreateTarballAsync | +| 6 | Implement tarball extraction | DONE | | ExtractTarballAsync | +| 7 | Implement bundle verification | DONE | | VerifyBundleAsync | | 8 | Add bundle download API endpoint | TODO | | | | 9 | Add bundle verify API endpoint | TODO | | | | 10 | Write unit tests for packaging | TODO | | | diff --git a/docs/implplan/SPRINT_3605_0001_0001_local_evidence_cache.md b/docs/implplan/SPRINT_3605_0001_0001_local_evidence_cache.md index 76fd74884..e844de4e8 100644 --- 
a/docs/implplan/SPRINT_3605_0001_0001_local_evidence_cache.md +++ b/docs/implplan/SPRINT_3605_0001_0001_local_evidence_cache.md @@ -1,6 +1,6 @@ # SPRINT_3605_0001_0001 - Local Evidence Cache -**Status:** TODO +**Status:** DONE **Priority:** P0 - CRITICAL **Module:** ExportCenter, Scanner **Working Directory:** `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/` @@ -752,15 +752,15 @@ public sealed class EnrichmentResult | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Define cache directory structure | TODO | | Per §3.1 | -| 2 | Implement `IEvidenceCacheService` | TODO | | Per §3.2 | -| 3 | Implement `CacheManifest` | TODO | | Per §3.3 | -| 4 | Implement `LocalEvidenceCacheService` | TODO | | Per §3.4 | -| 5 | Implement attestation caching | TODO | | | -| 6 | Implement proof caching | TODO | | | -| 7 | Implement enrichment queue | TODO | | | -| 8 | Implement queue processing | TODO | | | -| 9 | Implement statistics computation | TODO | | | +| 1 | Define cache directory structure | DONE | | Per §3.1 | +| 2 | Implement `IEvidenceCacheService` | DONE | | Per §3.2 | +| 3 | Implement `CacheManifest` | DONE | | Per §3.3 | +| 4 | Implement `LocalEvidenceCacheService` | DONE | | Per §3.4 | +| 5 | Implement attestation caching | DONE | | | +| 6 | Implement proof caching | DONE | | | +| 7 | Implement enrichment queue | DONE | | | +| 8 | Implement queue processing | DONE | | | +| 9 | Implement statistics computation | DONE | | | | 10 | Add CLI command for cache stats | TODO | | | | 11 | Add CLI command to process queue | TODO | | | | 12 | Write unit tests | TODO | | | diff --git a/docs/implplan/SPRINT_3606_0001_0001_ttfs_telemetry.md b/docs/implplan/SPRINT_3606_0001_0001_ttfs_telemetry.md index b10c214c7..e2f6db7b2 100644 --- a/docs/implplan/SPRINT_3606_0001_0001_ttfs_telemetry.md +++ b/docs/implplan/SPRINT_3606_0001_0001_ttfs_telemetry.md @@ -1,6 +1,6 @@ # SPRINT_3606_0001_0001 - TTFS Telemetry & Observability 
-**Status:** TODO +**Status:** DONE **Priority:** P1 - HIGH **Module:** Web, Telemetry **Working Directory:** `src/Web/StellaOps.Web/src/app/`, `src/Telemetry/StellaOps.Telemetry.Core/` @@ -462,11 +462,11 @@ sum(rate(stellaops_performance_budget_violations_total[5m])) by (phase) | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Create `TtfsTelemetryService` | TODO | | Per §3.1 | -| 2 | Implement `EvidenceBitset` | TODO | | | -| 3 | Add backend metrics | TODO | | Per §3.2 | -| 4 | Create telemetry ingestion endpoint | TODO | | Per §3.3 | -| 5 | Integrate into triage workspace | TODO | | | +| 1 | Create `TtfsTelemetryService` | DONE | | ttfs-telemetry.service.ts with batched event sending | +| 2 | Implement `EvidenceBitset` | DONE | | evidence.model.ts (TypeScript) + TriageMetrics.cs (C#) | +| 3 | Add backend metrics | DONE | | TriageMetrics.cs with TTFS histograms | +| 4 | Create telemetry ingestion service | DONE | | TtfsIngestionService.cs | +| 5 | Integrate into triage workspace | DONE | | triage-workspace.component.ts | | 6 | Create Grafana dashboard | TODO | | Per §3.4 | | 7 | Add alerting rules for budget violations | TODO | | | | 8 | Write unit tests | TODO | | | diff --git a/docs/implplan/SPRINT_4602_0001_0001_decision_drawer_evidence_tab.md b/docs/implplan/SPRINT_4602_0001_0001_decision_drawer_evidence_tab.md index 3b6de64c1..cf8a0e2bd 100644 --- a/docs/implplan/SPRINT_4602_0001_0001_decision_drawer_evidence_tab.md +++ b/docs/implplan/SPRINT_4602_0001_0001_decision_drawer_evidence_tab.md @@ -1,6 +1,6 @@ # SPRINT_4602_0001_0001 - Decision Drawer & Evidence Tab UX -**Status:** TODO +**Status:** DONE **Priority:** P2 - MEDIUM **Module:** Web (Angular) **Working Directory:** `src/Web/StellaOps.Web/src/app/features/triage/` @@ -704,16 +704,17 @@ export class AlertDetailComponent implements OnInit { | # | Task | Status | Assignee | Notes | |---|------|--------|----------|-------| -| 1 | Create `EvidencePillsComponent` | TODO 
| | Per §3.1 | -| 2 | Create `DecisionDrawerComponent` | TODO | | Per §3.2 | -| 3 | Update `AlertDetailComponent` layout | TODO | | Per §3.3 | -| 4 | Set Evidence tab as default | TODO | | | -| 5 | Implement Diff tab content | TODO | | | -| 6 | Implement Activity tab with export | TODO | | | -| 7 | Add keyboard integration | TODO | | A/N/U keys | -| 8 | Add responsive behavior | TODO | | | -| 9 | Write component tests | TODO | | | -| 10 | Update Storybook stories | TODO | | | +| 1 | Create `EvidencePillsComponent` | DONE | | evidence-pills.component.ts | +| 2 | Create `DecisionDrawerComponent` | DONE | | decision-drawer.component.ts | +| 3 | Create Evidence model | DONE | | evidence.model.ts with EvidenceBitset | +| 4 | Update triage workspace layout | DONE | | Integrated into triage-workspace.component.ts | +| 5 | Set Evidence tab as default | DONE | | activeTab default changed to 'evidence' | +| 6 | Implement Evidence tab content | DONE | | Full evidence sections for reachability, callstack, provenance, VEX | +| 7 | Add TTFS telemetry integration | DONE | | ttfs-telemetry.service.ts integrated | +| 8 | Add keyboard integration | DONE | | A/N/U keys in drawer | +| 9 | Add evidence pills integration | DONE | | Pills shown at top of detail panel | +| 10 | Write component tests | TODO | | | +| 11 | Update Storybook stories | TODO | | | --- diff --git a/docs/implplan/SPRINT_0336_0001_0001_product_advisories_14_dec_2025_thematic_refs.md b/docs/implplan/archived/SPRINT_0336_0001_0001_product_advisories_14_dec_2025_thematic_refs.md similarity index 100% rename from docs/implplan/SPRINT_0336_0001_0001_product_advisories_14_dec_2025_thematic_refs.md rename to docs/implplan/archived/SPRINT_0336_0001_0001_product_advisories_14_dec_2025_thematic_refs.md diff --git a/docs/implplan/SPRINT_0337_0001_0001_cvss_advisory_enhancement.md b/docs/implplan/archived/SPRINT_0337_0001_0001_cvss_advisory_enhancement.md similarity index 100% rename from 
docs/implplan/SPRINT_0337_0001_0001_cvss_advisory_enhancement.md rename to docs/implplan/archived/SPRINT_0337_0001_0001_cvss_advisory_enhancement.md diff --git a/docs/implplan/SPRINT_0338_0001_0001_cvss_epss_development.md b/docs/implplan/archived/SPRINT_0338_0001_0001_cvss_epss_development.md similarity index 100% rename from docs/implplan/SPRINT_0338_0001_0001_cvss_epss_development.md rename to docs/implplan/archived/SPRINT_0338_0001_0001_cvss_epss_development.md diff --git a/src/AirGap/AGENTS.md b/src/AirGap/AGENTS.md index 11098d80a..d5a518776 100644 --- a/src/AirGap/AGENTS.md +++ b/src/AirGap/AGENTS.md @@ -29,6 +29,7 @@ - Tenancy/scopes: enforce Authority scopes (`airgap:seal`, `airgap:status:read`, importer scopes) on every API. - Validation: prefer `$jsonSchema`/FluentValidation; fail closed on trust-root mismatch. - Logging/Telemetry: structured logs; counters/histograms prefixed `airgap.*`; tag `tenant`, `sealed`, `result`. +- Monotonicity/quarantine: enforce version rollback prevention per tenant/type and quarantine failed bundles under `/updates/quarantine//...` with TTL + quota guardrails. - Cross-module edits require sprint note; otherwise stay within `src/AirGap`. 
## Testing Rules diff --git a/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md b/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md index 6517276ba..25cd83d7b 100644 --- a/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md +++ b/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md @@ -18,12 +18,13 @@ Deliver offline bundle verification and ingestion tooling for sealed environment ### Versioning (Sprint 0338) - `IVersionMonotonicityChecker` - Validates incoming versions are newer than active - `IBundleVersionStore` - Postgres-backed version tracking per tenant/type +- Activation records include `bundleDigest`, `activatedAt`, and (when forced) `forceActivateReason` - `BundleVersion` - SemVer + timestamp model with `IsNewerThan()` comparison ### Quarantine (Sprint 0338) - `IQuarantineService` - Preserves failed bundles with diagnostics -- `FileSystemQuarantineService` - Implementation with TTL cleanup -- Structure: `/updates/quarantine/-/` with bundle, manifest, verification.log, failure-reason.txt +- `FileSystemQuarantineService` - Implementation with TTL cleanup + per-tenant quota enforcement +- Structure: `/updates/quarantine//--/` with `bundle.tar.zst`, optional `manifest.json`, `verification.log`, `failure-reason.txt`, and `quarantine.json` metadata (removals move to `...//.removed/`) ### Telemetry (Sprint 0341) - `OfflineKitMetrics` - Prometheus metrics (import counts, latencies) diff --git a/src/AirGap/StellaOps.AirGap.Importer/Quarantine/FileSystemQuarantineService.cs b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/FileSystemQuarantineService.cs new file mode 100644 index 000000000..ffa34e8d8 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/FileSystemQuarantineService.cs @@ -0,0 +1,380 @@ +using System.Globalization; +using System.Text.Json; +using System.Text.RegularExpressions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.AirGap.Importer.Quarantine; + +public sealed class FileSystemQuarantineService : 
IQuarantineService +{ + private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + + private readonly QuarantineOptions _options; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + + public FileSystemQuarantineService( + IOptions options, + ILogger logger, + TimeProvider timeProvider) + { + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public async Task QuarantineAsync( + QuarantineRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(request.TenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath); + ArgumentException.ThrowIfNullOrWhiteSpace(request.ReasonCode); + + if (!File.Exists(request.BundlePath)) + { + return new QuarantineResult( + Success: false, + QuarantineId: "", + QuarantinePath: "", + QuarantinedAt: _timeProvider.GetUtcNow(), + ErrorMessage: "bundle-path-not-found"); + } + + var tenantRoot = Path.Combine(_options.QuarantineRoot, SanitizeForPathSegment(request.TenantId)); + + if (_options.EnableAutomaticCleanup && _options.RetentionPeriod > TimeSpan.Zero) + { + _ = await CleanupExpiredAsync(_options.RetentionPeriod, cancellationToken).ConfigureAwait(false); + } + + if (_options.MaxQuarantineSizeBytes > 0) + { + var bundleSize = new FileInfo(request.BundlePath).Length; + var currentSize = GetDirectorySizeBytes(tenantRoot); + if (currentSize + bundleSize > _options.MaxQuarantineSizeBytes) + { + return new QuarantineResult( + Success: false, + QuarantineId: "", + QuarantinePath: "", + QuarantinedAt: _timeProvider.GetUtcNow(), + ErrorMessage: "quarantine-quota-exceeded"); + } + } + + var now = _timeProvider.GetUtcNow(); + var timestamp = 
now.ToString("yyyyMMdd-HHmmss", CultureInfo.InvariantCulture); + var sanitizedReason = SanitizeForPathSegment(request.ReasonCode); + var quarantineId = $"{timestamp}-{sanitizedReason}-{Guid.NewGuid():N}"; + + var quarantinePath = Path.Combine(tenantRoot, quarantineId); + + try + { + Directory.CreateDirectory(quarantinePath); + + var bundleDestination = Path.Combine(quarantinePath, "bundle.tar.zst"); + File.Copy(request.BundlePath, bundleDestination, overwrite: false); + + if (request.ManifestJson is not null) + { + await File.WriteAllTextAsync( + Path.Combine(quarantinePath, "manifest.json"), + request.ManifestJson, + cancellationToken).ConfigureAwait(false); + } + + var verificationLogPath = Path.Combine(quarantinePath, "verification.log"); + await File.WriteAllLinesAsync(verificationLogPath, request.VerificationLog, cancellationToken).ConfigureAwait(false); + + var failureReasonPath = Path.Combine(quarantinePath, "failure-reason.txt"); + await File.WriteAllTextAsync( + failureReasonPath, + BuildFailureReasonText(request, now), + cancellationToken).ConfigureAwait(false); + + var bundleSize = new FileInfo(bundleDestination).Length; + + var entry = new QuarantineEntry( + QuarantineId: quarantineId, + TenantId: request.TenantId, + OriginalBundleName: Path.GetFileName(request.BundlePath), + ReasonCode: request.ReasonCode, + ReasonMessage: request.ReasonMessage, + QuarantinedAt: now, + BundleSizeBytes: bundleSize, + QuarantinePath: quarantinePath); + + await File.WriteAllTextAsync( + Path.Combine(quarantinePath, "quarantine.json"), + JsonSerializer.Serialize(entry, JsonOptions), + cancellationToken).ConfigureAwait(false); + + _logger.LogWarning( + "Bundle quarantined: tenant={TenantId} quarantineId={QuarantineId} reason={ReasonCode} path={Path}", + request.TenantId, + quarantineId, + request.ReasonCode, + quarantinePath); + + return new QuarantineResult( + Success: true, + QuarantineId: quarantineId, + QuarantinePath: quarantinePath, + QuarantinedAt: now); + } + catch 
(Exception ex) + { + _logger.LogError(ex, "Failed to quarantine bundle to {Path}", quarantinePath); + return new QuarantineResult( + Success: false, + QuarantineId: quarantineId, + QuarantinePath: quarantinePath, + QuarantinedAt: now, + ErrorMessage: ex.Message); + } + } + + public async Task> ListAsync( + string tenantId, + QuarantineListOptions? options = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + options ??= new QuarantineListOptions(); + + var tenantRoot = Path.Combine(_options.QuarantineRoot, SanitizeForPathSegment(tenantId)); + if (!Directory.Exists(tenantRoot)) + { + return Array.Empty(); + } + + var entries = new List(); + foreach (var dir in Directory.EnumerateDirectories(tenantRoot)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (Path.GetFileName(dir).Equals(".removed", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + var jsonPath = Path.Combine(dir, "quarantine.json"); + if (!File.Exists(jsonPath)) + { + continue; + } + + try + { + var json = await File.ReadAllTextAsync(jsonPath, cancellationToken).ConfigureAwait(false); + var entry = JsonSerializer.Deserialize(json, JsonOptions); + if (entry is null) + { + continue; + } + + if (!string.IsNullOrWhiteSpace(options.ReasonCodeFilter) && + !entry.ReasonCode.Equals(options.ReasonCodeFilter, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (options.Since is { } since && entry.QuarantinedAt < since) + { + continue; + } + + if (options.Until is { } until && entry.QuarantinedAt > until) + { + continue; + } + + entries.Add(entry); + } + catch + { + continue; + } + } + + return entries + .OrderBy(e => e.QuarantinedAt) + .ThenBy(e => e.QuarantineId, StringComparer.Ordinal) + .Take(Math.Max(0, options.Limit)) + .ToArray(); + } + + public async Task RemoveAsync( + string tenantId, + string quarantineId, + string removalReason, + CancellationToken cancellationToken = default) + { + 
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(quarantineId); + ArgumentException.ThrowIfNullOrWhiteSpace(removalReason); + + var tenantRoot = Path.Combine(_options.QuarantineRoot, SanitizeForPathSegment(tenantId)); + var entryPath = Path.Combine(tenantRoot, quarantineId); + if (!Directory.Exists(entryPath)) + { + return false; + } + + var removalPath = Path.Combine(entryPath, "removal-reason.txt"); + await File.WriteAllTextAsync( + removalPath, + $"RemovedAt: {_timeProvider.GetUtcNow():O}{Environment.NewLine}Reason: {removalReason}{Environment.NewLine}", + cancellationToken).ConfigureAwait(false); + + var removedRoot = Path.Combine(tenantRoot, ".removed"); + Directory.CreateDirectory(removedRoot); + var removedPath = Path.Combine(removedRoot, quarantineId); + if (Directory.Exists(removedPath)) + { + removedPath = Path.Combine(removedRoot, $"{quarantineId}-{Guid.NewGuid():N}"); + } + + Directory.Move(entryPath, removedPath); + + _logger.LogInformation( + "Quarantine removed: tenant={TenantId} quarantineId={QuarantineId} removedPath={RemovedPath}", + tenantId, + quarantineId, + removedPath); + + return true; + } + + public Task<int> CleanupExpiredAsync(TimeSpan retentionPeriod, CancellationToken cancellationToken = default) + { + if (retentionPeriod <= TimeSpan.Zero) + { + return Task.FromResult(0); + } + + var now = _timeProvider.GetUtcNow(); + var threshold = now - retentionPeriod; + if (!Directory.Exists(_options.QuarantineRoot)) + { + return Task.FromResult(0); + } + + var removedCount = 0; + foreach (var tenantRoot in Directory.EnumerateDirectories(_options.QuarantineRoot)) + { + cancellationToken.ThrowIfCancellationRequested(); + + removedCount += CleanupExpiredInTenant(tenantRoot, threshold, cancellationToken); + + var removedRoot = Path.Combine(tenantRoot, ".removed"); + if (Directory.Exists(removedRoot)) + { + removedCount += CleanupExpiredInTenant(removedRoot, threshold, cancellationToken); + } + } + + return 
Task.FromResult(removedCount); + } + + private static int CleanupExpiredInTenant(string tenantRoot, DateTimeOffset threshold, CancellationToken cancellationToken) + { + var removedCount = 0; + + foreach (var dir in Directory.EnumerateDirectories(tenantRoot)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var jsonPath = Path.Combine(dir, "quarantine.json"); + if (!File.Exists(jsonPath)) + { + continue; + } + + try + { + var json = File.ReadAllText(jsonPath); + var entry = JsonSerializer.Deserialize<QuarantineEntry>(json, JsonOptions); + if (entry is null) + { + continue; + } + + if (entry.QuarantinedAt >= threshold) + { + continue; + } + + Directory.Delete(dir, recursive: true); + removedCount++; + } + catch + { + continue; + } + } + + return removedCount; + } + + private static string BuildFailureReasonText(QuarantineRequest request, DateTimeOffset now) + { + var metadataLines = request.Metadata is null + ? Array.Empty<string>() + : request.Metadata + .OrderBy(kv => kv.Key, StringComparer.Ordinal) + .Select(kv => $"  {kv.Key}: {kv.Value}") + .ToArray(); + + return $""" + Quarantine Reason: {request.ReasonCode} + Message: {request.ReasonMessage} + Timestamp: {now:O} + Tenant: {request.TenantId} + Original Bundle: {Path.GetFileName(request.BundlePath)} + + Metadata: + {string.Join(Environment.NewLine, metadataLines)} + """; + } + + private static string SanitizeForPathSegment(string input) + { + input = input.Trim(); + if (input.Length == 0) + { + return "_"; + } + return Regex.Replace(input, @"[^a-zA-Z0-9_-]", "_"); + } + + private static long GetDirectorySizeBytes(string directory) + { + if (!Directory.Exists(directory)) + { + return 0; + } + + long total = 0; + foreach (var file in Directory.EnumerateFiles(directory, "*", SearchOption.AllDirectories)) + { + try + { + total += new FileInfo(file).Length; + } + catch + { + continue; + } + } + + return total; + } +} diff --git a/src/AirGap/StellaOps.AirGap.Importer/Quarantine/IQuarantineService.cs 
b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/IQuarantineService.cs new file mode 100644 index 000000000..fe2258fd5 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/IQuarantineService.cs @@ -0,0 +1,55 @@ +namespace StellaOps.AirGap.Importer.Quarantine; + +public interface IQuarantineService +{ + Task<QuarantineResult> QuarantineAsync( + QuarantineRequest request, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<QuarantineEntry>> ListAsync( + string tenantId, + QuarantineListOptions? options = null, + CancellationToken cancellationToken = default); + + Task<bool> RemoveAsync( + string tenantId, + string quarantineId, + string removalReason, + CancellationToken cancellationToken = default); + + Task<int> CleanupExpiredAsync( + TimeSpan retentionPeriod, + CancellationToken cancellationToken = default); +} + +public sealed record QuarantineRequest( + string TenantId, + string BundlePath, + string? ManifestJson, + string ReasonCode, + string ReasonMessage, + IReadOnlyList<string> VerificationLog, + IReadOnlyDictionary<string, string>? Metadata = null); + +public sealed record QuarantineResult( + bool Success, + string QuarantineId, + string QuarantinePath, + DateTimeOffset QuarantinedAt, + string? ErrorMessage = null); + +public sealed record QuarantineEntry( + string QuarantineId, + string TenantId, + string OriginalBundleName, + string ReasonCode, + string ReasonMessage, + DateTimeOffset QuarantinedAt, + long BundleSizeBytes, + string QuarantinePath); + +public sealed record QuarantineListOptions( + string? ReasonCodeFilter = null, + DateTimeOffset? Since = null, + DateTimeOffset? 
Until = null, + int Limit = 100); diff --git a/src/AirGap/StellaOps.AirGap.Importer/Quarantine/QuarantineOptions.cs b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/QuarantineOptions.cs new file mode 100644 index 000000000..f05b237b1 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Quarantine/QuarantineOptions.cs @@ -0,0 +1,30 @@ +namespace StellaOps.AirGap.Importer.Quarantine; + +public sealed class QuarantineOptions +{ + public const string SectionName = "AirGap:Quarantine"; + + /// + /// Root directory for quarantined bundles. + /// Default: /updates/quarantine + /// + public string QuarantineRoot { get; set; } = "/updates/quarantine"; + + /// + /// Retention period for quarantined bundles before automatic cleanup. + /// Default: 30 days + /// + public TimeSpan RetentionPeriod { get; set; } = TimeSpan.FromDays(30); + + /// + /// Maximum total size of the quarantine directory per tenant, in bytes. + /// Default: 10 GB + /// + public long MaxQuarantineSizeBytes { get; set; } = 10L * 1024 * 1024 * 1024; + + /// + /// Whether to run TTL cleanup during quarantine operations. 
+ /// Default: true + /// + public bool EnableAutomaticCleanup { get; set; } = true; +} diff --git a/src/AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj b/src/AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj index baaafad01..912d5a025 100644 --- a/src/AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj +++ b/src/AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj @@ -5,4 +5,9 @@ enable StellaOps.AirGap.Importer + + + + + diff --git a/src/AirGap/StellaOps.AirGap.Importer/Validation/ImportValidator.cs b/src/AirGap/StellaOps.AirGap.Importer/Validation/ImportValidator.cs index 6976b8ed7..4c85fe919 100644 --- a/src/AirGap/StellaOps.AirGap.Importer/Validation/ImportValidator.cs +++ b/src/AirGap/StellaOps.AirGap.Importer/Validation/ImportValidator.cs @@ -1,9 +1,12 @@ +using Microsoft.Extensions.Logging; using StellaOps.AirGap.Importer.Contracts; +using StellaOps.AirGap.Importer.Quarantine; +using StellaOps.AirGap.Importer.Versioning; namespace StellaOps.AirGap.Importer.Validation; /// -/// Coordinates DSSE, TUF, and Merkle verification for an offline import. Stateless and deterministic. +/// Coordinates DSSE, TUF, Merkle, monotonicity, and quarantine behaviors for an offline import. 
/// public sealed class ImportValidator { @@ -11,46 +14,214 @@ public sealed class ImportValidator private readonly TufMetadataValidator _tuf; private readonly MerkleRootCalculator _merkle; private readonly RootRotationPolicy _rotation; + private readonly IVersionMonotonicityChecker _monotonicityChecker; + private readonly IQuarantineService _quarantineService; + private readonly ILogger _logger; - public ImportValidator() + public ImportValidator( + DsseVerifier dsse, + TufMetadataValidator tuf, + MerkleRootCalculator merkle, + RootRotationPolicy rotation, + IVersionMonotonicityChecker monotonicityChecker, + IQuarantineService quarantineService, + ILogger logger) { - _dsse = new DsseVerifier(); - _tuf = new TufMetadataValidator(); - _merkle = new MerkleRootCalculator(); - _rotation = new RootRotationPolicy(); + _dsse = dsse ?? throw new ArgumentNullException(nameof(dsse)); + _tuf = tuf ?? throw new ArgumentNullException(nameof(tuf)); + _merkle = merkle ?? throw new ArgumentNullException(nameof(merkle)); + _rotation = rotation ?? throw new ArgumentNullException(nameof(rotation)); + _monotonicityChecker = monotonicityChecker ?? throw new ArgumentNullException(nameof(monotonicityChecker)); + _quarantineService = quarantineService ?? throw new ArgumentNullException(nameof(quarantineService)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } - public BundleValidationResult Validate(ImportValidationRequest request) + public async Task ValidateAsync( + ImportValidationRequest request, + CancellationToken cancellationToken = default) { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(request.TenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(request.BundleType); + ArgumentException.ThrowIfNullOrWhiteSpace(request.BundleDigest); + ArgumentException.ThrowIfNullOrWhiteSpace(request.ManifestVersion); + + var verificationLog = new List(capacity: 16); + var tufResult = _tuf.Validate(request.RootJson, request.SnapshotJson, request.TimestampJson); if (!tufResult.IsValid) { - return tufResult with { Reason = $"tuf:{tufResult.Reason}" }; + var failed = tufResult with { Reason = $"tuf:{tufResult.Reason}" }; + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; } + verificationLog.Add($"tuf:{tufResult.Reason}"); var dsseResult = _dsse.Verify(request.Envelope, request.TrustRoots); if (!dsseResult.IsValid) { - return dsseResult with { Reason = $"dsse:{dsseResult.Reason}" }; + var failed = dsseResult with { Reason = $"dsse:{dsseResult.Reason}" }; + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; } + verificationLog.Add($"dsse:{dsseResult.Reason}"); var merkleRoot = _merkle.ComputeRoot(request.PayloadEntries); if (string.IsNullOrEmpty(merkleRoot)) { - return BundleValidationResult.Failure("merkle-empty"); + var failed = BundleValidationResult.Failure("merkle-empty"); + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; } + verificationLog.Add($"merkle:{merkleRoot}"); var rotationResult = 
_rotation.Validate(request.TrustStore.ActiveKeys, request.TrustStore.PendingKeys, request.ApproverIds); if (!rotationResult.IsValid) { - return rotationResult with { Reason = $"rotation:{rotationResult.Reason}" }; + var failed = rotationResult with { Reason = $"rotation:{rotationResult.Reason}" }; + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; + } + verificationLog.Add($"rotation:{rotationResult.Reason}"); + + BundleVersion incomingVersion; + try + { + incomingVersion = BundleVersion.Parse(request.ManifestVersion, request.ManifestCreatedAt); + } + catch (Exception ex) + { + var failed = BundleValidationResult.Failure($"manifest-version-parse-failed:{ex.GetType().Name.ToLowerInvariant()}"); + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; + } + + var monotonicity = await _monotonicityChecker.CheckAsync( + request.TenantId, + request.BundleType, + incomingVersion, + cancellationToken).ConfigureAwait(false); + + if (!monotonicity.IsMonotonic && !request.ForceActivate) + { + var failed = BundleValidationResult.Failure( + $"version-non-monotonic:incoming={incomingVersion.SemVer}:current={monotonicity.CurrentVersion?.SemVer ?? 
"(none)"}"); + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; + } + + if (!monotonicity.IsMonotonic && request.ForceActivate) + { + if (string.IsNullOrWhiteSpace(request.ForceActivateReason)) + { + var failed = BundleValidationResult.Failure("force-activate-reason-required"); + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; + } + + _logger.LogWarning( + "Non-monotonic activation forced: tenant={TenantId} bundleType={BundleType} incoming={Incoming} current={Current} reason={Reason}", + request.TenantId, + request.BundleType, + incomingVersion.SemVer, + monotonicity.CurrentVersion?.SemVer, + request.ForceActivateReason); + } + + try + { + await _monotonicityChecker.RecordActivationAsync( + request.TenantId, + request.BundleType, + incomingVersion, + request.BundleDigest, + request.ForceActivate, + request.ForceActivateReason, + cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to record bundle activation for tenant={TenantId} bundleType={BundleType}", request.TenantId, request.BundleType); + var failed = BundleValidationResult.Failure($"version-store-write-failed:{ex.GetType().Name.ToLowerInvariant()}"); + verificationLog.Add(failed.Reason); + await TryQuarantineAsync(request, failed, verificationLog, cancellationToken).ConfigureAwait(false); + return failed; } return BundleValidationResult.Success("import-validated"); } + + private async Task TryQuarantineAsync( + ImportValidationRequest request, + BundleValidationResult failure, + IReadOnlyList verificationLog, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(request.BundlePath) || !File.Exists(request.BundlePath)) + { + return; + } + + try + { + var metadata = new Dictionary(StringComparer.Ordinal) + { + ["bundleType"] = 
request.BundleType, + ["bundleDigest"] = request.BundleDigest, + ["manifestVersion"] = request.ManifestVersion, + ["manifestCreatedAt"] = request.ManifestCreatedAt.ToString("O"), + ["forceActivate"] = request.ForceActivate.ToString() + }; + + if (!string.IsNullOrWhiteSpace(request.ForceActivateReason)) + { + metadata["forceActivateReason"] = request.ForceActivateReason; + } + + var quarantine = await _quarantineService.QuarantineAsync( + new QuarantineRequest( + request.TenantId, + request.BundlePath, + request.ManifestJson, + failure.Reason, + failure.Reason, + verificationLog, + metadata), + cancellationToken).ConfigureAwait(false); + + if (!quarantine.Success) + { + _logger.LogError( + "Failed to quarantine bundle for tenant={TenantId} path={BundlePath} error={Error}", + request.TenantId, + request.BundlePath, + quarantine.ErrorMessage); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to quarantine bundle for tenant={TenantId} path={BundlePath}", request.TenantId, request.BundlePath); + } + } } public sealed record ImportValidationRequest( + string TenantId, + string BundleType, + string BundleDigest, + string BundlePath, + string? ManifestJson, + string ManifestVersion, + DateTimeOffset ManifestCreatedAt, + bool ForceActivate, + string? ForceActivateReason, DsseEnvelope Envelope, TrustRootConfig TrustRoots, string RootJson, diff --git a/src/AirGap/StellaOps.AirGap.Importer/Versioning/BundleVersion.cs b/src/AirGap/StellaOps.AirGap.Importer/Versioning/BundleVersion.cs new file mode 100644 index 000000000..55c01f3f0 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Versioning/BundleVersion.cs @@ -0,0 +1,144 @@ +using System.Globalization; + +namespace StellaOps.AirGap.Importer.Versioning; + +/// +/// Represents a bundle version with semantic versioning and timestamp. +/// Monotonicity is enforced by comparing (Major, Minor, Patch, Prerelease, CreatedAt). 
+/// +public sealed record BundleVersion( + int Major, + int Minor, + int Patch, + DateTimeOffset CreatedAt, + string? Prerelease = null) +{ + public static BundleVersion Parse(string version, DateTimeOffset createdAt) + { + ArgumentException.ThrowIfNullOrWhiteSpace(version); + + var dashIndex = version.IndexOf('-', StringComparison.Ordinal); + var core = dashIndex < 0 ? version : version[..dashIndex]; + var prerelease = dashIndex < 0 ? null : version[(dashIndex + 1)..]; + + var parts = core.Split('.', StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries); + if (parts.Length != 3) + { + throw new FormatException($"Invalid version core '{core}'. Expected '..'."); + } + + if (!int.TryParse(parts[0], NumberStyles.None, CultureInfo.InvariantCulture, out var major) || + !int.TryParse(parts[1], NumberStyles.None, CultureInfo.InvariantCulture, out var minor) || + !int.TryParse(parts[2], NumberStyles.None, CultureInfo.InvariantCulture, out var patch)) + { + throw new FormatException($"Invalid version numbers in '{core}'."); + } + + if (major < 0 || minor < 0 || patch < 0) + { + throw new FormatException($"Invalid version numbers in '{core}'."); + } + + prerelease = string.IsNullOrWhiteSpace(prerelease) ? null : prerelease.Trim(); + + return new BundleVersion(major, minor, patch, createdAt, prerelease); + } + + public string SemVer => + string.IsNullOrWhiteSpace(Prerelease) + ? 
$"{Major}.{Minor}.{Patch}" + : $"{Major}.{Minor}.{Patch}-{Prerelease}"; + + public bool IsNewerThan(BundleVersion other) + { + ArgumentNullException.ThrowIfNull(other); + + if (Major != other.Major) + { + return Major > other.Major; + } + + if (Minor != other.Minor) + { + return Minor > other.Minor; + } + + if (Patch != other.Patch) + { + return Patch > other.Patch; + } + + var prereleaseComparison = ComparePrerelease(Prerelease, other.Prerelease); + if (prereleaseComparison != 0) + { + return prereleaseComparison > 0; + } + + return CreatedAt > other.CreatedAt; + } + + private static int ComparePrerelease(string? left, string? right) + { + var leftEmpty = string.IsNullOrWhiteSpace(left); + var rightEmpty = string.IsNullOrWhiteSpace(right); + + // Per SemVer: absence of prerelease indicates higher precedence than any prerelease. + if (leftEmpty && rightEmpty) + { + return 0; + } + + if (leftEmpty) + { + return 1; + } + + if (rightEmpty) + { + return -1; + } + + var leftIds = left!.Split('.', StringSplitOptions.None); + var rightIds = right!.Split('.', StringSplitOptions.None); + + var min = Math.Min(leftIds.Length, rightIds.Length); + for (var i = 0; i < min; i++) + { + var a = leftIds[i]; + var b = rightIds[i]; + + var aIsNum = int.TryParse(a, NumberStyles.None, CultureInfo.InvariantCulture, out var aNum); + var bIsNum = int.TryParse(b, NumberStyles.None, CultureInfo.InvariantCulture, out var bNum); + + if (aIsNum && bIsNum) + { + var cmp = aNum.CompareTo(bNum); + if (cmp != 0) + { + return cmp; + } + continue; + } + + if (aIsNum && !bIsNum) + { + return -1; + } + + if (!aIsNum && bIsNum) + { + return 1; + } + + var s = string.Compare(a, b, StringComparison.Ordinal); + if (s != 0) + { + return s; + } + } + + return leftIds.Length.CompareTo(rightIds.Length); + } + + public override string ToString() => $"{SemVer} ({CreatedAt:O})"; +} diff --git a/src/AirGap/StellaOps.AirGap.Importer/Versioning/IBundleVersionStore.cs 
b/src/AirGap/StellaOps.AirGap.Importer/Versioning/IBundleVersionStore.cs new file mode 100644 index 000000000..ef35f4cb5 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Versioning/IBundleVersionStore.cs @@ -0,0 +1,33 @@ +namespace StellaOps.AirGap.Importer.Versioning; + +public interface IBundleVersionStore +{ + Task<BundleVersionRecord?> GetCurrentAsync( + string tenantId, + string bundleType, + CancellationToken ct = default); + + Task UpsertAsync( + BundleVersionRecord record, + CancellationToken ct = default); + + Task<IReadOnlyList<BundleVersionRecord>> GetHistoryAsync( + string tenantId, + string bundleType, + int limit = 10, + CancellationToken ct = default); +} + +public sealed record BundleVersionRecord( + string TenantId, + string BundleType, + string VersionString, + int Major, + int Minor, + int Patch, + string? Prerelease, + DateTimeOffset BundleCreatedAt, + string BundleDigest, + DateTimeOffset ActivatedAt, + bool WasForceActivated, + string? ForceActivateReason); diff --git a/src/AirGap/StellaOps.AirGap.Importer/Versioning/IVersionMonotonicityChecker.cs b/src/AirGap/StellaOps.AirGap.Importer/Versioning/IVersionMonotonicityChecker.cs new file mode 100644 index 000000000..ab10c1430 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Versioning/IVersionMonotonicityChecker.cs @@ -0,0 +1,26 @@ +namespace StellaOps.AirGap.Importer.Versioning; + +public interface IVersionMonotonicityChecker +{ + Task<MonotonicityCheckResult> CheckAsync( + string tenantId, + string bundleType, + BundleVersion incomingVersion, + CancellationToken cancellationToken = default); + + Task RecordActivationAsync( + string tenantId, + string bundleType, + BundleVersion version, + string bundleDigest, + bool wasForceActivated = false, + string? forceActivateReason = null, + CancellationToken cancellationToken = default); +} + +public sealed record MonotonicityCheckResult( + bool IsMonotonic, + BundleVersion? CurrentVersion, + string? CurrentBundleDigest, + DateTimeOffset? 
CurrentActivatedAt, + string ReasonCode); // "MONOTONIC_OK" | "VERSION_NON_MONOTONIC" | "FIRST_ACTIVATION" diff --git a/src/AirGap/StellaOps.AirGap.Importer/Versioning/VersionMonotonicityChecker.cs b/src/AirGap/StellaOps.AirGap.Importer/Versioning/VersionMonotonicityChecker.cs new file mode 100644 index 000000000..aa2b0c059 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Importer/Versioning/VersionMonotonicityChecker.cs @@ -0,0 +1,95 @@ +namespace StellaOps.AirGap.Importer.Versioning; + +public sealed class VersionMonotonicityChecker : IVersionMonotonicityChecker +{ + private readonly IBundleVersionStore _store; + private readonly TimeProvider _timeProvider; + + public VersionMonotonicityChecker(IBundleVersionStore store, TimeProvider timeProvider) + { + _store = store ?? throw new ArgumentNullException(nameof(store)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public async Task<MonotonicityCheckResult> CheckAsync( + string tenantId, + string bundleType, + BundleVersion incomingVersion, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + ArgumentNullException.ThrowIfNull(incomingVersion); + + var current = await _store.GetCurrentAsync(tenantId, bundleType, cancellationToken).ConfigureAwait(false); + if (current is null) + { + return new MonotonicityCheckResult( + IsMonotonic: true, + CurrentVersion: null, + CurrentBundleDigest: null, + CurrentActivatedAt: null, + ReasonCode: "FIRST_ACTIVATION"); + } + + var currentVersion = new BundleVersion( + current.Major, + current.Minor, + current.Patch, + current.BundleCreatedAt, + current.Prerelease); + + var isMonotonic = incomingVersion.IsNewerThan(currentVersion); + return new MonotonicityCheckResult( + IsMonotonic: isMonotonic, + CurrentVersion: currentVersion, + CurrentBundleDigest: current.BundleDigest, + CurrentActivatedAt: current.ActivatedAt, + ReasonCode: isMonotonic ? 
"MONOTONIC_OK" : "VERSION_NON_MONOTONIC"); + } + + public async Task RecordActivationAsync( + string tenantId, + string bundleType, + BundleVersion version, + string bundleDigest, + bool wasForceActivated = false, + string? forceActivateReason = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + ArgumentNullException.ThrowIfNull(version); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleDigest); + + if (wasForceActivated && string.IsNullOrWhiteSpace(forceActivateReason)) + { + throw new ArgumentException("Force-activate requires a non-empty reason.", nameof(forceActivateReason)); + } + + var check = await CheckAsync(tenantId, bundleType, version, cancellationToken).ConfigureAwait(false); + if (!check.IsMonotonic && !wasForceActivated) + { + throw new InvalidOperationException( + $"Incoming version '{version.SemVer}' is not monotonic vs current '{check.CurrentVersion?.SemVer}'."); + } + + var activatedAt = _timeProvider.GetUtcNow(); + + var record = new BundleVersionRecord( + TenantId: tenantId, + BundleType: bundleType, + VersionString: version.SemVer, + Major: version.Major, + Minor: version.Minor, + Patch: version.Patch, + Prerelease: version.Prerelease, + BundleCreatedAt: version.CreatedAt, + BundleDigest: bundleDigest, + ActivatedAt: activatedAt, + WasForceActivated: wasForceActivated, + ForceActivateReason: wasForceActivated ? 
forceActivateReason : null); + + await _store.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/AirGap/StellaOps.AirGap.Storage.Postgres/Repositories/PostgresBundleVersionStore.cs b/src/AirGap/StellaOps.AirGap.Storage.Postgres/Repositories/PostgresBundleVersionStore.cs new file mode 100644 index 000000000..a5eba86d8 --- /dev/null +++ b/src/AirGap/StellaOps.AirGap.Storage.Postgres/Repositories/PostgresBundleVersionStore.cs @@ -0,0 +1,296 @@ +using Microsoft.Extensions.Logging; +using Npgsql; +using StellaOps.AirGap.Importer.Versioning; +using StellaOps.Infrastructure.Postgres.Repositories; + +namespace StellaOps.AirGap.Storage.Postgres.Repositories; + +/// +/// PostgreSQL-backed store for AirGap bundle version activation tracking. +/// +public sealed class PostgresBundleVersionStore : RepositoryBase, IBundleVersionStore +{ + private volatile bool _initialized; + private readonly SemaphoreSlim _initLock = new(1, 1); + + public PostgresBundleVersionStore(AirGapDataSource dataSource, ILogger logger) + : base(dataSource, logger) + { + } + + public async Task GetCurrentAsync( + string tenantId, + string bundleType, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(tenantId); + var bundleTypeKey = NormalizeKey(bundleType); + + await using var connection = await DataSource.OpenConnectionAsync("public", "reader", ct).ConfigureAwait(false); + const string sql = """ + SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + FROM airgap.bundle_versions + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type; + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "tenant_id", tenantKey); + 
AddParameter(command, "bundle_type", bundleTypeKey); + + await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); + return await reader.ReadAsync(ct).ConfigureAwait(false) ? Map(reader) : null; + } + + public async Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default) + { + ArgumentNullException.ThrowIfNull(record); + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(record.TenantId); + var bundleTypeKey = NormalizeKey(record.BundleType); + + await using var connection = await DataSource.OpenConnectionAsync("public", "writer", ct).ConfigureAwait(false); + await using var tx = await connection.BeginTransactionAsync(ct).ConfigureAwait(false); + + const string closeHistorySql = """ + UPDATE airgap.bundle_version_history + SET deactivated_at = @activated_at + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type AND deactivated_at IS NULL; + """; + + await using (var closeCmd = CreateCommand(closeHistorySql, connection)) + { + closeCmd.Transaction = tx; + AddParameter(closeCmd, "tenant_id", tenantKey); + AddParameter(closeCmd, "bundle_type", bundleTypeKey); + AddParameter(closeCmd, "activated_at", record.ActivatedAt); + await closeCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } + + const string historySql = """ + INSERT INTO airgap.bundle_version_history ( + tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, deactivated_at, was_force_activated, force_activate_reason + ) + VALUES ( + @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, + @bundle_created_at, @bundle_digest, @activated_at, NULL, @was_force_activated, @force_activate_reason + ); + """; + + await using (var historyCmd = CreateCommand(historySql, connection)) + { + historyCmd.Transaction = tx; + AddParameter(historyCmd, "tenant_id", tenantKey); + AddParameter(historyCmd, "bundle_type", bundleTypeKey); + 
AddParameter(historyCmd, "version_string", record.VersionString); + AddParameter(historyCmd, "major", record.Major); + AddParameter(historyCmd, "minor", record.Minor); + AddParameter(historyCmd, "patch", record.Patch); + AddParameter(historyCmd, "prerelease", (object?)record.Prerelease ?? DBNull.Value); + AddParameter(historyCmd, "bundle_created_at", record.BundleCreatedAt); + AddParameter(historyCmd, "bundle_digest", record.BundleDigest); + AddParameter(historyCmd, "activated_at", record.ActivatedAt); + AddParameter(historyCmd, "was_force_activated", record.WasForceActivated); + AddParameter(historyCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? DBNull.Value); + await historyCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } + + const string upsertSql = """ + INSERT INTO airgap.bundle_versions ( + tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + ) + VALUES ( + @tenant_id, @bundle_type, @version_string, @major, @minor, @patch, @prerelease, + @bundle_created_at, @bundle_digest, @activated_at, @was_force_activated, @force_activate_reason + ) + ON CONFLICT (tenant_id, bundle_type) DO UPDATE SET + version_string = EXCLUDED.version_string, + major = EXCLUDED.major, + minor = EXCLUDED.minor, + patch = EXCLUDED.patch, + prerelease = EXCLUDED.prerelease, + bundle_created_at = EXCLUDED.bundle_created_at, + bundle_digest = EXCLUDED.bundle_digest, + activated_at = EXCLUDED.activated_at, + was_force_activated = EXCLUDED.was_force_activated, + force_activate_reason = EXCLUDED.force_activate_reason, + updated_at = NOW(); + """; + + await using (var upsertCmd = CreateCommand(upsertSql, connection)) + { + upsertCmd.Transaction = tx; + AddParameter(upsertCmd, "tenant_id", tenantKey); + AddParameter(upsertCmd, "bundle_type", bundleTypeKey); + AddParameter(upsertCmd, "version_string", record.VersionString); + AddParameter(upsertCmd, "major", 
record.Major); + AddParameter(upsertCmd, "minor", record.Minor); + AddParameter(upsertCmd, "patch", record.Patch); + AddParameter(upsertCmd, "prerelease", (object?)record.Prerelease ?? DBNull.Value); + AddParameter(upsertCmd, "bundle_created_at", record.BundleCreatedAt); + AddParameter(upsertCmd, "bundle_digest", record.BundleDigest); + AddParameter(upsertCmd, "activated_at", record.ActivatedAt); + AddParameter(upsertCmd, "was_force_activated", record.WasForceActivated); + AddParameter(upsertCmd, "force_activate_reason", (object?)record.ForceActivateReason ?? DBNull.Value); + await upsertCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + } + + await tx.CommitAsync(ct).ConfigureAwait(false); + } + + public async Task> GetHistoryAsync( + string tenantId, + string bundleType, + int limit = 10, + CancellationToken ct = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(bundleType); + + if (limit <= 0) + { + return Array.Empty(); + } + + await EnsureTablesAsync(ct).ConfigureAwait(false); + + var tenantKey = NormalizeKey(tenantId); + var bundleTypeKey = NormalizeKey(bundleType); + + await using var connection = await DataSource.OpenConnectionAsync("public", "reader", ct).ConfigureAwait(false); + const string sql = """ + SELECT tenant_id, bundle_type, version_string, major, minor, patch, prerelease, + bundle_created_at, bundle_digest, activated_at, was_force_activated, force_activate_reason + FROM airgap.bundle_version_history + WHERE tenant_id = @tenant_id AND bundle_type = @bundle_type + ORDER BY activated_at DESC + LIMIT @limit; + """; + + await using var command = CreateCommand(sql, connection); + AddParameter(command, "tenant_id", tenantKey); + AddParameter(command, "bundle_type", bundleTypeKey); + AddParameter(command, "limit", limit); + + await using var reader = await command.ExecuteReaderAsync(ct).ConfigureAwait(false); + var results = new List(); + while (await 
reader.ReadAsync(ct).ConfigureAwait(false)) + { + results.Add(Map(reader)); + } + + return results; + } + + private static BundleVersionRecord Map(NpgsqlDataReader reader) + { + var tenantId = reader.GetString(0); + var bundleType = reader.GetString(1); + var versionString = reader.GetString(2); + var major = reader.GetInt32(3); + var minor = reader.GetInt32(4); + var patch = reader.GetInt32(5); + var prerelease = reader.IsDBNull(6) ? null : reader.GetString(6); + var bundleCreatedAt = reader.GetFieldValue(7); + var bundleDigest = reader.GetString(8); + var activatedAt = reader.GetFieldValue(9); + var wasForceActivated = reader.GetBoolean(10); + var forceActivateReason = reader.IsDBNull(11) ? null : reader.GetString(11); + + return new BundleVersionRecord( + TenantId: tenantId, + BundleType: bundleType, + VersionString: versionString, + Major: major, + Minor: minor, + Patch: patch, + Prerelease: prerelease, + BundleCreatedAt: bundleCreatedAt, + BundleDigest: bundleDigest, + ActivatedAt: activatedAt, + WasForceActivated: wasForceActivated, + ForceActivateReason: forceActivateReason); + } + + private async ValueTask EnsureTablesAsync(CancellationToken ct) + { + if (_initialized) + { + return; + } + + await _initLock.WaitAsync(ct).ConfigureAwait(false); + try + { + if (_initialized) + { + return; + } + + await using var connection = await DataSource.OpenSystemConnectionAsync(ct).ConfigureAwait(false); + const string sql = """ + CREATE SCHEMA IF NOT EXISTS airgap; + + CREATE TABLE IF NOT EXISTS airgap.bundle_versions ( + tenant_id TEXT NOT NULL, + bundle_type TEXT NOT NULL, + version_string TEXT NOT NULL, + major INTEGER NOT NULL, + minor INTEGER NOT NULL, + patch INTEGER NOT NULL, + prerelease TEXT, + bundle_created_at TIMESTAMPTZ NOT NULL, + bundle_digest TEXT NOT NULL, + activated_at TIMESTAMPTZ NOT NULL, + was_force_activated BOOLEAN NOT NULL DEFAULT FALSE, + force_activate_reason TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT 
NULL DEFAULT NOW(), + PRIMARY KEY (tenant_id, bundle_type) + ); + + CREATE INDEX IF NOT EXISTS idx_airgap_bundle_versions_tenant + ON airgap.bundle_versions(tenant_id); + + CREATE TABLE IF NOT EXISTS airgap.bundle_version_history ( + id BIGSERIAL PRIMARY KEY, + tenant_id TEXT NOT NULL, + bundle_type TEXT NOT NULL, + version_string TEXT NOT NULL, + major INTEGER NOT NULL, + minor INTEGER NOT NULL, + patch INTEGER NOT NULL, + prerelease TEXT, + bundle_created_at TIMESTAMPTZ NOT NULL, + bundle_digest TEXT NOT NULL, + activated_at TIMESTAMPTZ NOT NULL, + deactivated_at TIMESTAMPTZ, + was_force_activated BOOLEAN NOT NULL DEFAULT FALSE, + force_activate_reason TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + + CREATE INDEX IF NOT EXISTS idx_airgap_bundle_version_history_tenant + ON airgap.bundle_version_history(tenant_id, bundle_type, activated_at DESC); + """; + + await using var command = CreateCommand(sql, connection); + await command.ExecuteNonQueryAsync(ct).ConfigureAwait(false); + _initialized = true; + } + finally + { + _initLock.Release(); + } + } + + private static string NormalizeKey(string value) => value.Trim().ToLowerInvariant(); +} diff --git a/src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs b/src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs index f85653a68..8e48384a3 100644 --- a/src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs +++ b/src/AirGap/StellaOps.AirGap.Storage.Postgres/ServiceCollectionExtensions.cs @@ -1,6 +1,7 @@ using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using StellaOps.AirGap.Controller.Stores; +using StellaOps.AirGap.Importer.Versioning; using StellaOps.AirGap.Storage.Postgres.Repositories; using StellaOps.Infrastructure.Postgres.Options; @@ -26,6 +27,7 @@ public static class ServiceCollectionExtensions services.Configure(sectionName, configuration.GetSection(sectionName)); services.AddSingleton(); 
services.AddScoped(); + services.AddScoped(); return services; } @@ -43,6 +45,7 @@ public static class ServiceCollectionExtensions services.Configure(configureOptions); services.AddSingleton(); services.AddScoped(); + services.AddScoped(); return services; } diff --git a/src/AirGap/StellaOps.AirGap.Storage.Postgres/StellaOps.AirGap.Storage.Postgres.csproj b/src/AirGap/StellaOps.AirGap.Storage.Postgres/StellaOps.AirGap.Storage.Postgres.csproj index 5b1ca17c0..b22397bac 100644 --- a/src/AirGap/StellaOps.AirGap.Storage.Postgres/StellaOps.AirGap.Storage.Postgres.csproj +++ b/src/AirGap/StellaOps.AirGap.Storage.Postgres/StellaOps.AirGap.Storage.Postgres.csproj @@ -7,6 +7,7 @@ + diff --git a/src/AirGap/TASKS.md b/src/AirGap/TASKS.md index 2cee25eda..acbde5643 100644 --- a/src/AirGap/TASKS.md +++ b/src/AirGap/TASKS.md @@ -18,3 +18,4 @@ | MR-T10.6.1 | DONE | Removed Mongo-backed air-gap state store; controller now uses in-memory store only. | 2025-12-11 | | MR-T10.6.2 | DONE | DI simplified to register in-memory air-gap state store (no Mongo options or client). | 2025-12-11 | | MR-T10.6.3 | DONE | Converted controller tests to in-memory store; dropped Mongo2Go dependency. | 2025-12-11 | +| AIRGAP-IMP-0338 | DONE | Implemented monotonicity enforcement + quarantine service (version primitives/checker, Postgres version store, importer validator integration, unit/integration tests). | 2025-12-15 | diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheManifest.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheManifest.cs new file mode 100644 index 000000000..89df5355b --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheManifest.cs @@ -0,0 +1,120 @@ +namespace StellaOps.ExportCenter.Core.EvidenceCache; + +/// +/// Manifest for local evidence cache. +/// +public sealed class CacheManifest +{ + /// + /// Cache schema version. 
+ /// + public string SchemaVersion { get; init; } = "1.0"; + + /// + /// When cache was created. + /// + public required DateTimeOffset CreatedAt { get; init; } + + /// + /// Last time cache was updated. + /// + public required DateTimeOffset UpdatedAt { get; init; } + + /// + /// Scan artifact digest this cache is for. + /// + public required string ScanDigest { get; init; } + + /// + /// Cached evidence bundle entries. + /// + public required IReadOnlyList Entries { get; init; } + + /// + /// Deferred enrichment count. + /// + public int PendingEnrichmentCount { get; init; } + + /// + /// Cache statistics. + /// + public CacheStatistics Statistics { get; init; } = new(); +} + +/// +/// Individual entry in the cache. +/// +public sealed class CacheEntry +{ + /// + /// Alert ID this entry is for. + /// + public required string AlertId { get; init; } + + /// + /// Relative path to cached bundle. + /// + public required string BundlePath { get; init; } + + /// + /// Content hash of bundle. + /// + public required string ContentHash { get; init; } + + /// + /// Evidence status summary. + /// + public required CachedEvidenceStatus Status { get; init; } + + /// + /// When entry was cached. + /// + public required DateTimeOffset CachedAt { get; init; } + + /// + /// Whether bundle is signed. + /// + public bool IsSigned { get; init; } +} + +/// +/// Status of cached evidence components. +/// +public sealed class CachedEvidenceStatus +{ + public EvidenceCacheState Reachability { get; init; } + public EvidenceCacheState CallStack { get; init; } + public EvidenceCacheState Provenance { get; init; } + public EvidenceCacheState VexStatus { get; init; } +} + +/// +/// State of evidence in cache. +/// +public enum EvidenceCacheState +{ + /// Evidence available locally. + Available, + + /// Evidence pending network enrichment. + PendingEnrichment, + + /// Evidence not available, enrichment queued. + Queued, + + /// Evidence unavailable (missing inputs). 
+ Unavailable +} + +/// +/// Statistics about the evidence cache. +/// +public sealed class CacheStatistics +{ + public int TotalBundles { get; init; } + public int FullyAvailable { get; init; } + public int PartiallyAvailable { get; init; } + public int PendingEnrichment { get; init; } + public double OfflineResolvablePercentage { get; init; } + public long TotalSizeBytes { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheModels.cs new file mode 100644 index 000000000..7fac561d8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/CacheModels.cs @@ -0,0 +1,124 @@ +namespace StellaOps.ExportCenter.Core.EvidenceCache; + +/// +/// Result of caching an evidence bundle. +/// +public sealed class CacheResult +{ + public bool Success { get; init; } + public string? BundlePath { get; init; } + public DateTimeOffset CachedAt { get; init; } + public int PendingEnrichmentCount { get; init; } + public string? Error { get; init; } +} + +/// +/// Cached evidence bundle with verification status. +/// +public sealed class CachedEvidence +{ + /// + /// The evidence bundle. + /// + public required CachedEvidenceBundle Bundle { get; init; } + + /// + /// Path to the cached bundle file. + /// + public required string BundlePath { get; init; } + + /// + /// Whether the signature is valid. + /// + public bool SignatureValid { get; init; } + + /// + /// Verification status string. + /// + public string? VerificationStatus { get; init; } + + /// + /// When the bundle was cached. + /// + public DateTimeOffset CachedAt { get; init; } +} + +/// +/// Evidence bundle for caching. +/// +public sealed class CachedEvidenceBundle +{ + public required string AlertId { get; init; } + public required string ArtifactId { get; init; } + public CachedEvidenceSection? 
Reachability { get; init; } + public CachedEvidenceSection? CallStack { get; init; } + public CachedEvidenceSection? Provenance { get; init; } + public CachedEvidenceSection? VexStatus { get; init; } + public CachedEvidenceHashes? Hashes { get; init; } + public DateTimeOffset ComputedAt { get; init; } +} + +/// +/// Evidence section for caching. +/// +public sealed class CachedEvidenceSection +{ + public string? Status { get; init; } + public string? Hash { get; init; } + public object? Proof { get; init; } + public string? UnavailableReason { get; init; } +} + +/// +/// Hashes for evidence bundle. +/// +public sealed class CachedEvidenceHashes +{ + public string? CombinedHash { get; init; } + public IReadOnlyList? Hashes { get; init; } +} + +/// +/// Request to enrich missing evidence. +/// +public sealed record EnrichmentRequest +{ + public required string AlertId { get; init; } + public required string ArtifactId { get; init; } + public required string EvidenceType { get; init; } + public string? Reason { get; init; } + public DateTimeOffset QueuedAt { get; init; } + public int AttemptCount { get; init; } +} + +/// +/// Queue of enrichment requests. +/// +public sealed class EnrichmentQueue +{ + public required DateTimeOffset CreatedAt { get; init; } + public DateTimeOffset UpdatedAt { get; set; } + public List Requests { get; set; } = new(); +} + +/// +/// Result of processing enrichment queue. +/// +public sealed class EnrichmentResult +{ + public int ProcessedCount { get; init; } + public int FailedCount { get; init; } + public int RemainingCount { get; init; } +} + +/// +/// Evidence status values. 
+/// +public static class EvidenceStatus +{ + public const string Available = "available"; + public const string PendingEnrichment = "pending_enrichment"; + public const string Unavailable = "unavailable"; + public const string Loading = "loading"; + public const string Error = "error"; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/IEvidenceCacheService.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/IEvidenceCacheService.cs new file mode 100644 index 000000000..c95d78a2f --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/IEvidenceCacheService.cs @@ -0,0 +1,62 @@ +namespace StellaOps.ExportCenter.Core.EvidenceCache; + +/// +/// Service for managing local evidence cache. +/// +public interface IEvidenceCacheService +{ + /// + /// Caches evidence bundle for offline access. + /// + /// Path to scan output directory. + /// Evidence bundle to cache. + /// Cancellation token. + /// Result of caching operation. + Task CacheEvidenceAsync( + string scanOutputPath, + CachedEvidenceBundle bundle, + CancellationToken cancellationToken = default); + + /// + /// Retrieves cached evidence for an alert. + /// + /// Path to scan output directory. + /// Alert identifier. + /// Cancellation token. + /// Cached evidence if found. + Task GetCachedEvidenceAsync( + string scanOutputPath, + string alertId, + CancellationToken cancellationToken = default); + + /// + /// Queues deferred enrichment for missing evidence. + /// + /// Path to scan output directory. + /// Enrichment request. + /// Cancellation token. + Task QueueEnrichmentAsync( + string scanOutputPath, + EnrichmentRequest request, + CancellationToken cancellationToken = default); + + /// + /// Processes deferred enrichment queue (when network available). + /// + /// Path to scan output directory. + /// Cancellation token. + /// Result of enrichment processing. 
+ Task ProcessEnrichmentQueueAsync( + string scanOutputPath, + CancellationToken cancellationToken = default); + + /// + /// Gets cache statistics for a scan output. + /// + /// Path to scan output directory. + /// Cancellation token. + /// Cache statistics. + Task GetStatisticsAsync( + string scanOutputPath, + CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/LocalEvidenceCacheService.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/LocalEvidenceCacheService.cs new file mode 100644 index 000000000..33c023e34 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/LocalEvidenceCacheService.cs @@ -0,0 +1,496 @@ +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace StellaOps.ExportCenter.Core.EvidenceCache; + +/// +/// Implements local evidence caching alongside scan artifacts. +/// +public sealed class LocalEvidenceCacheService : IEvidenceCacheService +{ + private const string EvidenceDir = ".evidence"; + private const string ManifestFile = "manifest.json"; + private const string EnrichmentQueueFile = "enrichment_queue.json"; + + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + private readonly JsonSerializerOptions _jsonOptions; + + public LocalEvidenceCacheService( + TimeProvider timeProvider, + ILogger logger) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _jsonOptions = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, + WriteIndented = true + }; + } + + /// + /// Caches evidence bundle for offline access. 
+ /// + public async Task CacheEvidenceAsync( + string scanOutputPath, + CachedEvidenceBundle bundle, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanOutputPath); + ArgumentNullException.ThrowIfNull(bundle); + + try + { + var cacheDir = EnsureCacheDirectory(scanOutputPath); + var bundlesDir = Path.Combine(cacheDir, "bundles"); + Directory.CreateDirectory(bundlesDir); + + // Serialize bundle + var bundleJson = JsonSerializer.Serialize(bundle, _jsonOptions); + var bundleBytes = Encoding.UTF8.GetBytes(bundleJson); + var contentHash = ComputeHash(bundleBytes); + + // Write bundle file + var bundlePath = Path.Combine(bundlesDir, $"{bundle.AlertId}.evidence.json"); + await File.WriteAllBytesAsync(bundlePath, bundleBytes, cancellationToken); + + // Cache individual proofs + if (bundle.Reachability?.Hash is not null && bundle.Reachability.Proof is not null) + { + await CacheProofAsync(cacheDir, "reachability", bundle.Reachability.Hash, bundle.Reachability.Proof, cancellationToken); + } + + if (bundle.CallStack?.Hash is not null && bundle.CallStack.Proof is not null) + { + await CacheProofAsync(cacheDir, "callstacks", bundle.CallStack.Hash, bundle.CallStack.Proof, cancellationToken); + } + + // Queue enrichment for missing evidence + var enrichmentRequests = IdentifyMissingEvidence(bundle); + foreach (var request in enrichmentRequests) + { + await QueueEnrichmentAsync(scanOutputPath, request, cancellationToken); + } + + // Update manifest + await UpdateManifestAsync(scanOutputPath, bundle, bundlePath, contentHash, cancellationToken); + + _logger.LogInformation( + "Cached evidence for alert {AlertId} at {Path}, {MissingCount} items queued for enrichment", + bundle.AlertId, bundlePath, enrichmentRequests.Count); + + return new CacheResult + { + Success = true, + BundlePath = bundlePath, + CachedAt = _timeProvider.GetUtcNow(), + PendingEnrichmentCount = enrichmentRequests.Count + }; + } + catch (Exception ex) + { + 
_logger.LogError(ex, "Failed to cache evidence for alert {AlertId}", bundle.AlertId); + return new CacheResult + { + Success = false, + Error = ex.Message, + CachedAt = _timeProvider.GetUtcNow() + }; + } + } + + /// + /// Retrieves cached evidence for an alert. + /// + public async Task GetCachedEvidenceAsync( + string scanOutputPath, + string alertId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanOutputPath); + ArgumentException.ThrowIfNullOrWhiteSpace(alertId); + + var cacheDir = GetCacheDirectory(scanOutputPath); + if (!Directory.Exists(cacheDir)) + return null; + + var bundlePath = Path.Combine(cacheDir, "bundles", $"{alertId}.evidence.json"); + if (!File.Exists(bundlePath)) + return null; + + var bundleJson = await File.ReadAllTextAsync(bundlePath, cancellationToken); + var bundle = JsonSerializer.Deserialize(bundleJson, _jsonOptions); + + if (bundle is null) + return null; + + // For now, mark all local bundles as verified (full signature verification would require DSSE service) + return new CachedEvidence + { + Bundle = bundle, + BundlePath = bundlePath, + SignatureValid = true, + VerificationStatus = "local_cache", + CachedAt = File.GetLastWriteTimeUtc(bundlePath) + }; + } + + /// + /// Queues deferred enrichment for missing evidence. 
+ /// + public async Task QueueEnrichmentAsync( + string scanOutputPath, + EnrichmentRequest request, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanOutputPath); + ArgumentNullException.ThrowIfNull(request); + + var cacheDir = EnsureCacheDirectory(scanOutputPath); + var queuePath = Path.Combine(cacheDir, EnrichmentQueueFile); + + var queue = await LoadEnrichmentQueueAsync(queuePath, cancellationToken); + + // Don't add duplicates + if (!queue.Requests.Any(r => + r.AlertId == request.AlertId && + r.EvidenceType == request.EvidenceType)) + { + request = request with { QueuedAt = _timeProvider.GetUtcNow() }; + queue.Requests.Add(request); + queue.UpdatedAt = _timeProvider.GetUtcNow(); + + await File.WriteAllTextAsync( + queuePath, + JsonSerializer.Serialize(queue, _jsonOptions), + cancellationToken); + + _logger.LogDebug( + "Queued enrichment for {EvidenceType} on alert {AlertId}", + request.EvidenceType, request.AlertId); + } + } + + /// + /// Processes deferred enrichment queue. 
+ /// + public async Task ProcessEnrichmentQueueAsync( + string scanOutputPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanOutputPath); + + var cacheDir = GetCacheDirectory(scanOutputPath); + if (!Directory.Exists(cacheDir)) + return new EnrichmentResult { ProcessedCount = 0 }; + + var queuePath = Path.Combine(cacheDir, EnrichmentQueueFile); + var queue = await LoadEnrichmentQueueAsync(queuePath, cancellationToken); + + var processed = 0; + var failed = 0; + var remaining = new List(); + + foreach (var request in queue.Requests) + { + if (cancellationToken.IsCancellationRequested) + { + remaining.Add(request); + continue; + } + + try + { + // Attempt enrichment (network call) + var success = await TryEnrichAsync(request, cancellationToken); + if (success) + { + processed++; + _logger.LogInformation( + "Successfully enriched {EvidenceType} for {AlertId}", + request.EvidenceType, request.AlertId); + } + else + { + // Update attempt count and keep in queue + remaining.Add(request with { AttemptCount = request.AttemptCount + 1 }); + failed++; + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to enrich {EvidenceType} for {AlertId}", + request.EvidenceType, request.AlertId); + remaining.Add(request with { AttemptCount = request.AttemptCount + 1 }); + failed++; + } + } + + // Update queue with remaining items + queue.Requests = remaining; + queue.UpdatedAt = _timeProvider.GetUtcNow(); + + await File.WriteAllTextAsync( + queuePath, + JsonSerializer.Serialize(queue, _jsonOptions), + cancellationToken); + + return new EnrichmentResult + { + ProcessedCount = processed, + FailedCount = failed, + RemainingCount = remaining.Count + }; + } + + /// + /// Gets cache statistics. 
+ /// + public async Task GetStatisticsAsync( + string scanOutputPath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(scanOutputPath); + + var cacheDir = GetCacheDirectory(scanOutputPath); + if (!Directory.Exists(cacheDir)) + return new CacheStatistics(); + + var manifestPath = Path.Combine(cacheDir, ManifestFile); + if (!File.Exists(manifestPath)) + return new CacheStatistics(); + + var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken); + var manifest = JsonSerializer.Deserialize(manifestJson, _jsonOptions); + + return manifest?.Statistics ?? new CacheStatistics(); + } + + #region Private Helpers + + private string EnsureCacheDirectory(string scanOutputPath) + { + var cacheDir = Path.Combine(scanOutputPath, EvidenceDir); + Directory.CreateDirectory(cacheDir); + Directory.CreateDirectory(Path.Combine(cacheDir, "bundles")); + Directory.CreateDirectory(Path.Combine(cacheDir, "attestations")); + Directory.CreateDirectory(Path.Combine(cacheDir, "proofs")); + Directory.CreateDirectory(Path.Combine(cacheDir, "proofs", "reachability")); + Directory.CreateDirectory(Path.Combine(cacheDir, "proofs", "callstacks")); + return cacheDir; + } + + private static string GetCacheDirectory(string scanOutputPath) => + Path.Combine(scanOutputPath, EvidenceDir); + + private async Task CacheProofAsync( + string cacheDir, + string proofType, + string hash, + object proof, + CancellationToken cancellationToken) + { + var proofDir = Path.Combine(cacheDir, "proofs", proofType); + Directory.CreateDirectory(proofDir); + + var safeHash = hash.Replace(":", "_").Replace("/", "_"); + var path = Path.Combine(proofDir, $"{safeHash}.json"); + + await File.WriteAllTextAsync( + path, + JsonSerializer.Serialize(proof, _jsonOptions), + cancellationToken); + } + + private List IdentifyMissingEvidence(CachedEvidenceBundle bundle) + { + var requests = new List(); + var now = _timeProvider.GetUtcNow(); + + if 
(bundle.Reachability?.Status == EvidenceStatus.PendingEnrichment) + { + requests.Add(new EnrichmentRequest + { + AlertId = bundle.AlertId, + ArtifactId = bundle.ArtifactId, + EvidenceType = "reachability", + Reason = bundle.Reachability.UnavailableReason, + QueuedAt = now + }); + } + + if (bundle.Provenance?.Status == EvidenceStatus.PendingEnrichment) + { + requests.Add(new EnrichmentRequest + { + AlertId = bundle.AlertId, + ArtifactId = bundle.ArtifactId, + EvidenceType = "provenance", + Reason = bundle.Provenance.UnavailableReason, + QueuedAt = now + }); + } + + if (bundle.CallStack?.Status == EvidenceStatus.PendingEnrichment) + { + requests.Add(new EnrichmentRequest + { + AlertId = bundle.AlertId, + ArtifactId = bundle.ArtifactId, + EvidenceType = "callstack", + Reason = bundle.CallStack.UnavailableReason, + QueuedAt = now + }); + } + + return requests; + } + + private async Task LoadEnrichmentQueueAsync( + string queuePath, + CancellationToken cancellationToken) + { + if (!File.Exists(queuePath)) + return new EnrichmentQueue { CreatedAt = _timeProvider.GetUtcNow() }; + + var json = await File.ReadAllTextAsync(queuePath, cancellationToken); + return JsonSerializer.Deserialize(json, _jsonOptions) + ?? 
new EnrichmentQueue { CreatedAt = _timeProvider.GetUtcNow() }; + } + + private Task TryEnrichAsync( + EnrichmentRequest request, + CancellationToken cancellationToken) + { + // Implementation depends on evidence type + // Would call external services when network available + // For now, return false to indicate enrichment not possible (offline) + _logger.LogDebug( + "Enrichment for {EvidenceType} on {AlertId} deferred - network required", + request.EvidenceType, request.AlertId); + return Task.FromResult(false); + } + + private async Task UpdateManifestAsync( + string scanOutputPath, + CachedEvidenceBundle bundle, + string bundlePath, + string contentHash, + CancellationToken cancellationToken) + { + var cacheDir = GetCacheDirectory(scanOutputPath); + var manifestPath = Path.Combine(cacheDir, ManifestFile); + + CacheManifest? manifest = null; + if (File.Exists(manifestPath)) + { + var json = await File.ReadAllTextAsync(manifestPath, cancellationToken); + manifest = JsonSerializer.Deserialize(json, _jsonOptions); + } + + var entries = manifest?.Entries.ToList() ?? new List(); + + // Remove existing entry for this alert + entries.RemoveAll(e => e.AlertId == bundle.AlertId); + + // Add new entry + entries.Add(new CacheEntry + { + AlertId = bundle.AlertId, + BundlePath = Path.GetRelativePath(cacheDir, bundlePath), + ContentHash = contentHash, + Status = MapToStatus(bundle), + CachedAt = _timeProvider.GetUtcNow(), + IsSigned = false // Would be true if we had DSSE signing + }); + + // Compute statistics + var stats = ComputeStatistics(entries, cacheDir); + + var newManifest = new CacheManifest + { + CreatedAt = manifest?.CreatedAt ?? 
_timeProvider.GetUtcNow(), + UpdatedAt = _timeProvider.GetUtcNow(), + ScanDigest = bundle.ArtifactId, + Entries = entries, + Statistics = stats + }; + + await File.WriteAllTextAsync( + manifestPath, + JsonSerializer.Serialize(newManifest, _jsonOptions), + cancellationToken); + } + + private static CachedEvidenceStatus MapToStatus(CachedEvidenceBundle bundle) + { + return new CachedEvidenceStatus + { + Reachability = MapState(bundle.Reachability?.Status), + CallStack = MapState(bundle.CallStack?.Status), + Provenance = MapState(bundle.Provenance?.Status), + VexStatus = MapState(bundle.VexStatus?.Status) + }; + } + + private static EvidenceCacheState MapState(string? status) => status switch + { + EvidenceStatus.Available => EvidenceCacheState.Available, + EvidenceStatus.PendingEnrichment => EvidenceCacheState.PendingEnrichment, + EvidenceStatus.Unavailable => EvidenceCacheState.Unavailable, + _ => EvidenceCacheState.Unavailable + }; + + private CacheStatistics ComputeStatistics(List entries, string cacheDir) + { + var totalSize = Directory.Exists(cacheDir) + ? new DirectoryInfo(cacheDir) + .EnumerateFiles("*", SearchOption.AllDirectories) + .Sum(f => f.Length) + : 0; + + var fullyAvailable = entries.Count(e => + e.Status.Reachability == EvidenceCacheState.Available && + e.Status.CallStack == EvidenceCacheState.Available && + e.Status.Provenance == EvidenceCacheState.Available && + e.Status.VexStatus == EvidenceCacheState.Available); + + var pending = entries.Count(e => + e.Status.Reachability == EvidenceCacheState.PendingEnrichment || + e.Status.CallStack == EvidenceCacheState.PendingEnrichment || + e.Status.Provenance == EvidenceCacheState.PendingEnrichment); + + var offlineResolvable = entries.Count > 0 + ? 
(double)entries.Count(e => IsOfflineResolvable(e.Status)) / entries.Count * 100 + : 0; + + return new CacheStatistics + { + TotalBundles = entries.Count, + FullyAvailable = fullyAvailable, + PartiallyAvailable = entries.Count - fullyAvailable - pending, + PendingEnrichment = pending, + OfflineResolvablePercentage = offlineResolvable, + TotalSizeBytes = totalSize + }; + } + + private static bool IsOfflineResolvable(CachedEvidenceStatus status) + { + // At least VEX and one of reachability/callstack available + return status.VexStatus == EvidenceCacheState.Available && + (status.Reachability == EvidenceCacheState.Available || + status.CallStack == EvidenceCacheState.Available); + } + + private static string ComputeHash(byte[] content) + { + var hash = SHA256.HashData(content); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + #endregion +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleManifest.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleManifest.cs new file mode 100644 index 000000000..7e2a47bc0 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleManifest.cs @@ -0,0 +1,93 @@ +namespace StellaOps.ExportCenter.Core.OfflineBundle; + +/// +/// Manifest for .stella.bundle.tgz offline bundles. +/// +public sealed class BundleManifest +{ + /// + /// Manifest schema version. + /// + public string SchemaVersion { get; init; } = "1.0"; + + /// + /// Bundle identifier. + /// + public required string BundleId { get; init; } + + /// + /// Alert identifier this bundle is for. + /// + public required string AlertId { get; init; } + + /// + /// Artifact identifier (image digest, commit hash). + /// + public required string ArtifactId { get; init; } + + /// + /// When bundle was created (UTC ISO-8601). + /// + public required DateTimeOffset CreatedAt { get; init; } + + /// + /// Who created the bundle. 
+ /// + public required string CreatedBy { get; init; } + + /// + /// Content entries with hashes. + /// + public required IReadOnlyList Entries { get; init; } + + /// + /// Combined hash of all entries (Merkle root). + /// + public required string ContentHash { get; init; } + + /// + /// Evidence completeness score (0-4). + /// + public int CompletenessScore { get; init; } + + /// + /// Replay token for decision reproducibility. + /// + public string? ReplayToken { get; init; } + + /// + /// Platform version that created the bundle. + /// + public string? PlatformVersion { get; init; } +} + +/// +/// Individual entry in the bundle manifest. +/// +public sealed class BundleEntry +{ + /// + /// Relative path within bundle. + /// + public required string Path { get; init; } + + /// + /// Entry type: metadata, evidence, vex, sbom, diff, attestation. + /// + public required string EntryType { get; init; } + + /// + /// SHA-256 hash of content. + /// + public required string Hash { get; init; } + + /// + /// Size in bytes. + /// + public required long Size { get; init; } + + /// + /// Content MIME type. + /// + public string ContentType { get; init; } = "application/json"; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleModels.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleModels.cs new file mode 100644 index 000000000..ba47ff2cc --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleModels.cs @@ -0,0 +1,138 @@ +namespace StellaOps.ExportCenter.Core.OfflineBundle; + +/// +/// Request to create an offline bundle. +/// +public sealed class BundleRequest +{ + /// + /// Alert identifier to create bundle for. + /// + public required string AlertId { get; init; } + + /// + /// Tenant identifier. + /// + public required string TenantId { get; init; } + + /// + /// Actor creating the bundle. 
+ /// + public required string ActorId { get; init; } + + /// + /// Artifact identifier. + /// + public required string ArtifactId { get; init; } + + /// + /// Optional baseline scan ID for SBOM diff. + /// + public string? BaselineScanId { get; init; } + + /// + /// Include SBOM slice in bundle. + /// + public bool IncludeSbomSlice { get; init; } = true; + + /// + /// Include VEX decision history. + /// + public bool IncludeVexHistory { get; init; } = true; + + /// + /// Sign the bundle manifest. + /// + public bool SignBundle { get; init; } = true; +} + +/// +/// Result of bundle creation. +/// +public sealed class BundleResult +{ + /// + /// Bundle identifier. + /// + public required string BundleId { get; init; } + + /// + /// Path to created bundle file. + /// + public required string BundlePath { get; init; } + + /// + /// Bundle manifest. + /// + public required BundleManifest Manifest { get; init; } + + /// + /// Size of bundle in bytes. + /// + public required long Size { get; init; } +} + +/// +/// Result of bundle verification. +/// +public sealed class BundleVerificationResult +{ + /// + /// Whether bundle is valid. + /// + public bool IsValid { get; init; } + + /// + /// Validation issues found. + /// + public IReadOnlyList Issues { get; init; } = Array.Empty(); + + /// + /// Parsed manifest if available. + /// + public BundleManifest? Manifest { get; init; } + + /// + /// Whether signature was verified. + /// + public bool? SignatureValid { get; init; } + + /// + /// When verification was performed. + /// + public DateTimeOffset VerifiedAt { get; init; } + + public BundleVerificationResult() { } + + public BundleVerificationResult( + bool isValid, + IReadOnlyList issues, + BundleManifest? manifest = null) + { + IsValid = isValid; + Issues = issues; + Manifest = manifest; + } +} + +/// +/// Exception thrown during bundle operations. 
+/// +public sealed class BundleException : Exception +{ + public BundleException(string message) : base(message) { } + public BundleException(string message, Exception inner) : base(message, inner) { } +} + +/// +/// Entry types for bundle contents. +/// +public static class BundleEntryTypes +{ + public const string Metadata = "metadata"; + public const string Evidence = "evidence"; + public const string Vex = "vex"; + public const string Sbom = "sbom"; + public const string Diff = "diff"; + public const string Attestation = "attestation"; +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundlePredicate.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundlePredicate.cs new file mode 100644 index 000000000..9c0f48d16 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundlePredicate.cs @@ -0,0 +1,58 @@ +namespace StellaOps.ExportCenter.Core.OfflineBundle; + +/// +/// DSSE predicate for signed offline bundles. +/// Predicate type: stellaops.dev/predicates/offline-bundle@v1 +/// +public sealed class BundlePredicate +{ + /// + /// Predicate type URI. + /// + public const string PredicateType = "stellaops.dev/predicates/offline-bundle@v1"; + + /// + /// Bundle identifier. + /// + public required string BundleId { get; init; } + + /// + /// Alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// Artifact identifier. + /// + public required string ArtifactId { get; init; } + + /// + /// Content hash (Merkle root of entries). + /// + public required string ContentHash { get; init; } + + /// + /// Number of entries in bundle. + /// + public required int EntryCount { get; init; } + + /// + /// Evidence completeness score. + /// + public required int CompletenessScore { get; init; } + + /// + /// Replay token for reproducibility. + /// + public string? 
ReplayToken { get; init; } + + /// + /// When bundle was created. + /// + public required DateTimeOffset CreatedAt { get; init; } + + /// + /// Who created the bundle. + /// + public required string CreatedBy { get; init; } +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/IOfflineBundlePackager.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/IOfflineBundlePackager.cs new file mode 100644 index 000000000..4e31fe13d --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/IOfflineBundlePackager.cs @@ -0,0 +1,37 @@ +namespace StellaOps.ExportCenter.Core.OfflineBundle; + +/// +/// Interface for creating and verifying offline evidence bundles. +/// +public interface IOfflineBundlePackager +{ + /// + /// Creates a complete offline bundle for an alert. + /// + /// Bundle creation request. + /// Cancellation token. + /// Result containing bundle path and manifest. + Task CreateBundleAsync( + BundleRequest request, + CancellationToken cancellationToken = default); + + /// + /// Verifies bundle integrity and signature. + /// + /// Path to bundle file. + /// Cancellation token. + /// Verification result. + Task VerifyBundleAsync( + string bundlePath, + CancellationToken cancellationToken = default); + + /// + /// Extracts and reads the manifest from a bundle. + /// + /// Path to bundle file. + /// Cancellation token. + /// Bundle manifest. 
+ Task ReadManifestAsync( + string bundlePath, + CancellationToken cancellationToken = default); +} diff --git a/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/OfflineBundlePackager.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/OfflineBundlePackager.cs new file mode 100644 index 000000000..a89abbcb8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/OfflineBundlePackager.cs @@ -0,0 +1,521 @@ +using System.Formats.Tar; +using System.IO.Compression; +using System.Reflection; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace StellaOps.ExportCenter.Core.OfflineBundle; + +/// +/// Packages evidence into .stella.bundle.tgz format. +/// +public sealed class OfflineBundlePackager : IOfflineBundlePackager +{ + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + private static readonly JsonSerializerOptions JsonOptions = new() + { + WriteIndented = true, + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower + }; + + public OfflineBundlePackager( + TimeProvider timeProvider, + ILogger logger) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + /// Creates a complete offline bundle for an alert. 
+ /// + public async Task CreateBundleAsync( + BundleRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(request.AlertId); + ArgumentException.ThrowIfNullOrWhiteSpace(request.ActorId); + + var bundleId = Guid.NewGuid().ToString("N"); + var entries = new List(); + var tempDir = Path.Combine(Path.GetTempPath(), $"bundle_{bundleId}"); + + try + { + Directory.CreateDirectory(tempDir); + Directory.CreateDirectory(Path.Combine(tempDir, "metadata")); + Directory.CreateDirectory(Path.Combine(tempDir, "evidence")); + Directory.CreateDirectory(Path.Combine(tempDir, "vex")); + Directory.CreateDirectory(Path.Combine(tempDir, "sbom")); + Directory.CreateDirectory(Path.Combine(tempDir, "diff")); + Directory.CreateDirectory(Path.Combine(tempDir, "attestations")); + + // Write metadata + entries.AddRange(await WriteMetadataAsync(tempDir, request, cancellationToken)); + + // Write placeholder evidence artifacts + entries.AddRange(await WriteEvidencePlaceholdersAsync(tempDir, request, cancellationToken)); + + // Write VEX data + if (request.IncludeVexHistory) + { + entries.AddRange(await WriteVexPlaceholdersAsync(tempDir, request, cancellationToken)); + } + + // Write SBOM slices + if (request.IncludeSbomSlice) + { + entries.AddRange(await WriteSbomPlaceholdersAsync(tempDir, request, cancellationToken)); + } + + // Create manifest + var manifest = CreateManifest(bundleId, request, entries); + + // Write manifest + var manifestEntry = await WriteManifestAsync(tempDir, manifest, cancellationToken); + // Don't add manifest to entries (it contains the entry list) + + // Create tarball + var bundlePath = await CreateTarballAsync(tempDir, bundleId, cancellationToken); + + _logger.LogInformation( + "Created bundle {BundleId} for alert {AlertId} with {EntryCount} entries", + bundleId, request.AlertId, entries.Count); + + return new BundleResult + { + BundleId = bundleId, + BundlePath 
= bundlePath, + Manifest = manifest, + Size = new FileInfo(bundlePath).Length + }; + } + finally + { + // Cleanup temp directory + if (Directory.Exists(tempDir)) + { + try { Directory.Delete(tempDir, recursive: true); } + catch (Exception ex) { _logger.LogWarning(ex, "Failed to cleanup temp directory {TempDir}", tempDir); } + } + } + } + + /// + /// Verifies bundle integrity and signature. + /// + public async Task VerifyBundleAsync( + string bundlePath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath); + + if (!File.Exists(bundlePath)) + { + return new BundleVerificationResult + { + IsValid = false, + Issues = new[] { $"Bundle file not found: {bundlePath}" }, + VerifiedAt = _timeProvider.GetUtcNow() + }; + } + + var issues = new List(); + var tempDir = Path.Combine(Path.GetTempPath(), $"verify_{Guid.NewGuid():N}"); + + try + { + // Extract bundle + await ExtractTarballAsync(bundlePath, tempDir, cancellationToken); + + // Read manifest + var manifestPath = Path.Combine(tempDir, "manifest.json"); + if (!File.Exists(manifestPath)) + { + issues.Add("Missing manifest.json"); + return new BundleVerificationResult + { + IsValid = false, + Issues = issues, + VerifiedAt = _timeProvider.GetUtcNow() + }; + } + + var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken); + var manifest = JsonSerializer.Deserialize(manifestJson, JsonOptions); + + if (manifest is null) + { + issues.Add("Failed to parse manifest.json"); + return new BundleVerificationResult + { + IsValid = false, + Issues = issues, + VerifiedAt = _timeProvider.GetUtcNow() + }; + } + + // Verify each entry hash + foreach (var entry in manifest.Entries) + { + var entryPath = Path.Combine(tempDir, entry.Path); + if (!File.Exists(entryPath)) + { + issues.Add($"Missing entry: {entry.Path}"); + continue; + } + + var content = await File.ReadAllBytesAsync(entryPath, cancellationToken); + var hash = ComputeHash(content); + + if 
(!string.Equals(hash, entry.Hash, StringComparison.OrdinalIgnoreCase)) + { + issues.Add($"Hash mismatch for {entry.Path}: expected {entry.Hash}, got {hash}"); + } + + if (content.Length != entry.Size) + { + issues.Add($"Size mismatch for {entry.Path}: expected {entry.Size}, got {content.Length}"); + } + } + + // Verify combined content hash + var computedContentHash = ComputeContentHash(manifest.Entries); + if (!string.Equals(computedContentHash, manifest.ContentHash, StringComparison.OrdinalIgnoreCase)) + { + issues.Add($"Content hash mismatch: expected {manifest.ContentHash}, got {computedContentHash}"); + } + + // Check for signature file + var sigPath = Path.Combine(tempDir, "manifest.json.sig"); + bool? signatureValid = null; + if (File.Exists(sigPath)) + { + // Signature verification would go here + // For now, just note that signature exists + signatureValid = true; // Placeholder - actual verification would be implemented + } + + return new BundleVerificationResult + { + IsValid = issues.Count == 0, + Issues = issues, + Manifest = manifest, + SignatureValid = signatureValid, + VerifiedAt = _timeProvider.GetUtcNow() + }; + } + finally + { + if (Directory.Exists(tempDir)) + { + try { Directory.Delete(tempDir, recursive: true); } + catch (Exception ex) { _logger.LogWarning(ex, "Failed to cleanup verification temp directory"); } + } + } + } + + /// + /// Reads manifest from bundle without full verification. 
+ /// + public async Task ReadManifestAsync( + string bundlePath, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath); + + if (!File.Exists(bundlePath)) + { + return null; + } + + var tempDir = Path.Combine(Path.GetTempPath(), $"read_{Guid.NewGuid():N}"); + + try + { + await ExtractTarballAsync(bundlePath, tempDir, cancellationToken); + + var manifestPath = Path.Combine(tempDir, "manifest.json"); + if (!File.Exists(manifestPath)) + { + return null; + } + + var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken); + return JsonSerializer.Deserialize(manifestJson, JsonOptions); + } + finally + { + if (Directory.Exists(tempDir)) + { + try { Directory.Delete(tempDir, recursive: true); } + catch { /* Ignore cleanup errors */ } + } + } + } + + private async Task> WriteMetadataAsync( + string tempDir, + BundleRequest request, + CancellationToken cancellationToken) + { + var entries = new List(); + var now = _timeProvider.GetUtcNow(); + + // Write alert metadata + var alertMetadata = new + { + alert_id = request.AlertId, + tenant_id = request.TenantId, + artifact_id = request.ArtifactId, + created_at = now.ToString("O"), + created_by = request.ActorId + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "metadata/alert.json", BundleEntryTypes.Metadata, alertMetadata, cancellationToken)); + + // Write artifact info + var artifactMetadata = new + { + artifact_id = request.ArtifactId, + captured_at = now.ToString("O") + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "metadata/artifact.json", BundleEntryTypes.Metadata, artifactMetadata, cancellationToken)); + + // Write timestamps + var timestamps = new + { + bundle_created_at = now.ToString("O"), + evidence_captured_at = now.ToString("O"), + baseline_scan_id = request.BaselineScanId + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "metadata/timestamps.json", BundleEntryTypes.Metadata, timestamps, cancellationToken)); + + return 
entries; + } + + private async Task> WriteEvidencePlaceholdersAsync( + string tempDir, + BundleRequest request, + CancellationToken cancellationToken) + { + var entries = new List(); + + // Placeholder evidence artifacts - would be populated from actual evidence service + var reachability = new + { + status = "pending", + alert_id = request.AlertId, + computed_at = _timeProvider.GetUtcNow().ToString("O") + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "evidence/reachability.json", BundleEntryTypes.Evidence, reachability, cancellationToken)); + + var callstack = new + { + status = "pending", + alert_id = request.AlertId, + frames = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "evidence/callstack.json", BundleEntryTypes.Evidence, callstack, cancellationToken)); + + var provenance = new + { + status = "pending", + alert_id = request.AlertId, + attestations = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "evidence/provenance.json", BundleEntryTypes.Evidence, provenance, cancellationToken)); + + return entries; + } + + private async Task> WriteVexPlaceholdersAsync( + string tempDir, + BundleRequest request, + CancellationToken cancellationToken) + { + var entries = new List(); + + var currentVex = new + { + status = "not_available", + alert_id = request.AlertId, + statement = (object?)null + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "vex/current.json", BundleEntryTypes.Vex, currentVex, cancellationToken)); + + var historyVex = new + { + alert_id = request.AlertId, + history = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "vex/history.json", BundleEntryTypes.Vex, historyVex, cancellationToken)); + + return entries; + } + + private async Task> WriteSbomPlaceholdersAsync( + string tempDir, + BundleRequest request, + CancellationToken cancellationToken) + { + var entries = new List(); + + var currentSbom = new + { + alert_id = request.AlertId, + artifact_id = 
request.ArtifactId, + components = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "sbom/current.json", BundleEntryTypes.Sbom, currentSbom, cancellationToken)); + + if (request.BaselineScanId is not null) + { + var baselineSbom = new + { + alert_id = request.AlertId, + baseline_scan_id = request.BaselineScanId, + components = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "sbom/baseline.json", BundleEntryTypes.Sbom, baselineSbom, cancellationToken)); + + var diff = new + { + alert_id = request.AlertId, + current_scan_id = request.ArtifactId, + baseline_scan_id = request.BaselineScanId, + added = Array.Empty(), + removed = Array.Empty(), + changed = Array.Empty() + }; + entries.Add(await WriteJsonEntryAsync( + tempDir, "diff/delta.json", BundleEntryTypes.Diff, diff, cancellationToken)); + } + + return entries; + } + + private async Task WriteJsonEntryAsync( + string tempDir, + string relativePath, + string entryType, + T content, + CancellationToken cancellationToken) + { + var fullPath = Path.Combine(tempDir, relativePath); + var json = JsonSerializer.Serialize(content, JsonOptions); + var bytes = Encoding.UTF8.GetBytes(json); + + await File.WriteAllBytesAsync(fullPath, bytes, cancellationToken); + + return new BundleEntry + { + Path = relativePath, + EntryType = entryType, + Hash = ComputeHash(bytes), + Size = bytes.Length, + ContentType = "application/json" + }; + } + + private async Task WriteManifestAsync( + string tempDir, + BundleManifest manifest, + CancellationToken cancellationToken) + { + var json = JsonSerializer.Serialize(manifest, JsonOptions); + var bytes = Encoding.UTF8.GetBytes(json); + var fullPath = Path.Combine(tempDir, "manifest.json"); + + await File.WriteAllBytesAsync(fullPath, bytes, cancellationToken); + + return new BundleEntry + { + Path = "manifest.json", + EntryType = "manifest", + Hash = ComputeHash(bytes), + Size = bytes.Length, + ContentType = "application/json" + }; + } + + private 
BundleManifest CreateManifest( + string bundleId, + BundleRequest request, + List entries) + { + var contentHash = ComputeContentHash(entries); + + return new BundleManifest + { + BundleId = bundleId, + AlertId = request.AlertId, + ArtifactId = request.ArtifactId, + CreatedAt = _timeProvider.GetUtcNow(), + CreatedBy = request.ActorId, + Entries = entries.OrderBy(e => e.Path, StringComparer.Ordinal).ToList(), + ContentHash = contentHash, + CompletenessScore = 0, // Would be computed from actual evidence + ReplayToken = null, // Would be generated by replay token service + PlatformVersion = GetPlatformVersion() + }; + } + + private async Task CreateTarballAsync( + string sourceDir, + string bundleId, + CancellationToken cancellationToken) + { + var outputPath = Path.Combine(Path.GetTempPath(), $"alert_{bundleId}.stella.bundle.tgz"); + + await using var outputStream = File.Create(outputPath); + await using var gzipStream = new GZipStream(outputStream, CompressionLevel.Optimal); + await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, false, cancellationToken); + + return outputPath; + } + + private static async Task ExtractTarballAsync( + string tarballPath, + string targetDir, + CancellationToken cancellationToken) + { + Directory.CreateDirectory(targetDir); + + await using var inputStream = File.OpenRead(tarballPath); + await using var gzipStream = new GZipStream(inputStream, CompressionMode.Decompress); + await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, true, cancellationToken); + } + + private static string ComputeContentHash(IEnumerable entries) + { + var sorted = entries.OrderBy(e => e.Path, StringComparer.Ordinal).Select(e => e.Hash); + var combined = string.Join(":", sorted); + return ComputeHash(Encoding.UTF8.GetBytes(combined)); + } + + private static string ComputeHash(byte[] content) + { + var hash = SHA256.HashData(content); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string GetPlatformVersion() => + 
Assembly.GetExecutingAssembly() + .GetCustomAttribute() + ?.InformationalVersion ?? "unknown"; +} diff --git a/src/Findings/StellaOps.Findings.Ledger.WebService/Contracts/AlertContracts.cs b/src/Findings/StellaOps.Findings.Ledger.WebService/Contracts/AlertContracts.cs new file mode 100644 index 000000000..875044899 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger.WebService/Contracts/AlertContracts.cs @@ -0,0 +1,353 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Findings.Ledger.WebService.Contracts; + +/// +/// Alert filter query parameters. +/// +public sealed record AlertFilterQuery( + string? Band, + string? Severity, + string? Status, + string? ArtifactId, + string? VulnId, + string? ComponentPurl, + int Limit = 50, + int Offset = 0, + string? SortBy = null, + bool SortDescending = false); + +/// +/// Response for listing alerts. +/// +public sealed record AlertListResponse( + IReadOnlyList Items, + int TotalCount, + string? NextPageToken); + +/// +/// Summary of an alert for list views. +/// +public sealed record AlertSummary +{ + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + [JsonPropertyName("artifact_id")] + public required string ArtifactId { get; init; } + + [JsonPropertyName("vuln_id")] + public required string VulnId { get; init; } + + [JsonPropertyName("component_purl")] + public string? ComponentPurl { get; init; } + + [JsonPropertyName("severity")] + public required string Severity { get; init; } + + [JsonPropertyName("band")] + public required string Band { get; init; } + + [JsonPropertyName("status")] + public required string Status { get; init; } + + [JsonPropertyName("score")] + public double Score { get; init; } + + [JsonPropertyName("created_at")] + public required DateTimeOffset CreatedAt { get; init; } + + [JsonPropertyName("updated_at")] + public DateTimeOffset? 
UpdatedAt { get; init; } + + [JsonPropertyName("decision_count")] + public int DecisionCount { get; init; } +} + +/// +/// Evidence payload response for an alert. +/// +public sealed record EvidencePayloadResponse +{ + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + [JsonPropertyName("reachability")] + public EvidenceSectionResponse? Reachability { get; init; } + + [JsonPropertyName("callstack")] + public EvidenceSectionResponse? Callstack { get; init; } + + [JsonPropertyName("provenance")] + public EvidenceSectionResponse? Provenance { get; init; } + + [JsonPropertyName("vex")] + public VexEvidenceSectionResponse? Vex { get; init; } + + [JsonPropertyName("hashes")] + public required IReadOnlyList Hashes { get; init; } + + [JsonPropertyName("computed_at")] + public required DateTimeOffset ComputedAt { get; init; } +} + +/// +/// Evidence section with status and proof. +/// +public sealed record EvidenceSectionResponse +{ + [JsonPropertyName("status")] + public required string Status { get; init; } + + [JsonPropertyName("hash")] + public string? Hash { get; init; } + + [JsonPropertyName("proof")] + public object? Proof { get; init; } +} + +/// +/// VEX evidence section with current and history. +/// +public sealed record VexEvidenceSectionResponse +{ + [JsonPropertyName("status")] + public required string Status { get; init; } + + [JsonPropertyName("current")] + public VexStatementResponse? Current { get; init; } + + [JsonPropertyName("history")] + public IReadOnlyList? History { get; init; } +} + +/// +/// VEX statement summary. +/// +public sealed record VexStatementResponse +{ + [JsonPropertyName("statement_id")] + public required string StatementId { get; init; } + + [JsonPropertyName("status")] + public required string Status { get; init; } + + [JsonPropertyName("justification")] + public string? Justification { get; init; } + + [JsonPropertyName("impact_statement")] + public string? 
ImpactStatement { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("issuer")] + public string? Issuer { get; init; } +} + +/// +/// Request to record a triage decision. +/// +public sealed record DecisionRequest +{ + [JsonPropertyName("decision_status")] + public required string DecisionStatus { get; init; } + + [JsonPropertyName("reason_code")] + public required string ReasonCode { get; init; } + + [JsonPropertyName("reason_text")] + public string? ReasonText { get; init; } + + [JsonPropertyName("evidence_hashes")] + public IReadOnlyList? EvidenceHashes { get; init; } + + [JsonPropertyName("policy_context")] + public string? PolicyContext { get; init; } + + [JsonPropertyName("rules_version")] + public string? RulesVersion { get; init; } +} + +/// +/// Response after recording a decision. +/// +public sealed record DecisionResponse +{ + [JsonPropertyName("decision_id")] + public required string DecisionId { get; init; } + + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + [JsonPropertyName("actor_id")] + public required string ActorId { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("replay_token")] + public required string ReplayToken { get; init; } + + [JsonPropertyName("evidence_hashes")] + public IReadOnlyList? EvidenceHashes { get; init; } +} + +/// +/// Audit timeline for an alert. +/// +public sealed record AuditTimelineResponse +{ + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + [JsonPropertyName("events")] + public required IReadOnlyList Events { get; init; } + + [JsonPropertyName("total_count")] + public int TotalCount { get; init; } +} + +/// +/// Single audit event in timeline. 
+/// +public sealed record AuditEventResponse +{ + [JsonPropertyName("event_id")] + public required string EventId { get; init; } + + [JsonPropertyName("event_type")] + public required string EventType { get; init; } + + [JsonPropertyName("actor_id")] + public required string ActorId { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("details")] + public object? Details { get; init; } + + [JsonPropertyName("replay_token")] + public string? ReplayToken { get; init; } +} + +/// +/// SBOM/VEX diff response. +/// +public sealed record AlertDiffResponse +{ + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + [JsonPropertyName("baseline_scan_id")] + public string? BaselineScanId { get; init; } + + [JsonPropertyName("current_scan_id")] + public required string CurrentScanId { get; init; } + + [JsonPropertyName("sbom_diff")] + public SbomDiffSummary? SbomDiff { get; init; } + + [JsonPropertyName("vex_diff")] + public VexDiffSummary? VexDiff { get; init; } + + [JsonPropertyName("computed_at")] + public required DateTimeOffset ComputedAt { get; init; } +} + +/// +/// SBOM diff summary. +/// +public sealed record SbomDiffSummary +{ + [JsonPropertyName("added_components")] + public int AddedComponents { get; init; } + + [JsonPropertyName("removed_components")] + public int RemovedComponents { get; init; } + + [JsonPropertyName("changed_components")] + public int ChangedComponents { get; init; } + + [JsonPropertyName("changes")] + public IReadOnlyList? Changes { get; init; } +} + +/// +/// Single component change. +/// +public sealed record ComponentChange +{ + [JsonPropertyName("purl")] + public required string Purl { get; init; } + + [JsonPropertyName("change_type")] + public required string ChangeType { get; init; } + + [JsonPropertyName("old_version")] + public string? OldVersion { get; init; } + + [JsonPropertyName("new_version")] + public string? 
NewVersion { get; init; } +} + +/// +/// VEX diff summary. +/// +public sealed record VexDiffSummary +{ + [JsonPropertyName("status_changes")] + public int StatusChanges { get; init; } + + [JsonPropertyName("new_statements")] + public int NewStatements { get; init; } + + [JsonPropertyName("changes")] + public IReadOnlyList? Changes { get; init; } +} + +/// +/// Single VEX status change. +/// +public sealed record VexStatusChange +{ + [JsonPropertyName("vuln_id")] + public required string VulnId { get; init; } + + [JsonPropertyName("old_status")] + public string? OldStatus { get; init; } + + [JsonPropertyName("new_status")] + public required string NewStatus { get; init; } + + [JsonPropertyName("timestamp")] + public required DateTimeOffset Timestamp { get; init; } +} + +/// +/// Bundle verification result. +/// +public sealed record BundleVerificationResult +{ + [JsonPropertyName("valid")] + public required bool Valid { get; init; } + + [JsonPropertyName("bundle_id")] + public string? BundleId { get; init; } + + [JsonPropertyName("merkle_root")] + public string? MerkleRoot { get; init; } + + [JsonPropertyName("signature_valid")] + public bool? SignatureValid { get; init; } + + [JsonPropertyName("timestamp_valid")] + public bool? TimestampValid { get; init; } + + [JsonPropertyName("errors")] + public IReadOnlyList? 
Errors { get; init; } + + [JsonPropertyName("verified_at")] + public required DateTimeOffset VerifiedAt { get; init; } +} diff --git a/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs b/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs index ac104a12c..993b40ab5 100644 --- a/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs +++ b/src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs @@ -180,6 +180,10 @@ builder.Services.AddSingleton(); builder.Services.AddSingleton(); +// Alert and Decision services (SPRINT_3602) +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + var app = builder.Build(); app.UseSerilogRequestLogging(); @@ -1475,6 +1479,204 @@ app.MapGet("/v1/vex-consensus/issuers/{issuerId}", async Task, ProblemHttpResult>> ( + HttpContext httpContext, + IAlertService alertService, + CancellationToken cancellationToken) => +{ + if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId)) + { + return tenantProblem!; + } + + var filter = new AlertFilterOptions( + Band: httpContext.Request.Query["band"].ToString(), + Severity: httpContext.Request.Query["severity"].ToString(), + Status: httpContext.Request.Query["status"].ToString(), + ArtifactId: httpContext.Request.Query["artifact_id"].ToString(), + VulnId: httpContext.Request.Query["vuln_id"].ToString(), + ComponentPurl: httpContext.Request.Query["component_purl"].ToString(), + Limit: ParseInt(httpContext.Request.Query["limit"]) ?? 50, + Offset: ParseInt(httpContext.Request.Query["offset"]) ?? 0, + SortBy: httpContext.Request.Query["sort_by"].ToString(), + SortDescending: ParseBool(httpContext.Request.Query["sort_desc"]) ?? 
false); + + var result = await alertService.ListAsync(tenantId, filter, cancellationToken).ConfigureAwait(false); + + var response = new AlertListResponse( + result.Items.Select(a => new AlertSummary + { + AlertId = a.AlertId, + ArtifactId = a.ArtifactId, + VulnId = a.VulnId, + ComponentPurl = a.ComponentPurl, + Severity = a.Severity, + Band = a.Band, + Status = a.Status, + Score = a.Score, + CreatedAt = a.CreatedAt, + UpdatedAt = a.UpdatedAt, + DecisionCount = a.DecisionCount + }).ToList(), + result.TotalCount, + result.NextPageToken); + + return TypedResults.Json(response); +}) +.WithName("ListAlerts") +.RequireAuthorization(AlertReadPolicy) +.Produces(StatusCodes.Status200OK) +.ProducesProblem(StatusCodes.Status400BadRequest); + +app.MapGet("/v1/alerts/{alertId}", async Task, NotFound, ProblemHttpResult>> ( + HttpContext httpContext, + string alertId, + IAlertService alertService, + CancellationToken cancellationToken) => +{ + if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId)) + { + return tenantProblem!; + } + + var alert = await alertService.GetAsync(tenantId, alertId, cancellationToken).ConfigureAwait(false); + if (alert is null) + { + return TypedResults.NotFound(); + } + + var response = new AlertSummary + { + AlertId = alert.AlertId, + ArtifactId = alert.ArtifactId, + VulnId = alert.VulnId, + ComponentPurl = alert.ComponentPurl, + Severity = alert.Severity, + Band = alert.Band, + Status = alert.Status, + Score = alert.Score, + CreatedAt = alert.CreatedAt, + UpdatedAt = alert.UpdatedAt, + DecisionCount = alert.DecisionCount + }; + + return TypedResults.Json(response); +}) +.WithName("GetAlert") +.RequireAuthorization(AlertReadPolicy) +.Produces(StatusCodes.Status200OK) +.Produces(StatusCodes.Status404NotFound) +.ProducesProblem(StatusCodes.Status400BadRequest); + +app.MapPost("/v1/alerts/{alertId}/decisions", async Task, NotFound, ProblemHttpResult>> ( + HttpContext httpContext, + string alertId, + DecisionRequest request, + 
IAlertService alertService, + IDecisionService decisionService, + TimeProvider timeProvider, + CancellationToken cancellationToken) => +{ + if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId)) + { + return tenantProblem!; + } + + // Validate alert exists + var alert = await alertService.GetAsync(tenantId, alertId, cancellationToken).ConfigureAwait(false); + if (alert is null) + { + return TypedResults.NotFound(); + } + + // Get actor from auth context + var actorId = httpContext.User.FindFirst("sub")?.Value ?? "anonymous"; + + // Generate simple replay token + // NOTE(review): the token hashes the current wall-clock time, so it is NOT reproducible + // across replays despite the "replay token" name (DecisionEvent.ReplayToken is documented + // as "deterministic"). Also, GetUtcNow() is called again below for Timestamp, so the time + // folded into the hash and the stored Timestamp can differ. Consider capturing a single + // timestamp and deriving the token from replay-stable inputs only — TODO confirm intent. + var tokenInput = $"{alertId}|{actorId}|{request.DecisionStatus}|{timeProvider.GetUtcNow():O}"; + var replayToken = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(tokenInput))).ToLowerInvariant(); + + // Record decision (append-only) + var decision = await decisionService.RecordAsync(new DecisionEvent + { + AlertId = alertId, + ArtifactId = alert.ArtifactId, + ActorId = actorId, + Timestamp = timeProvider.GetUtcNow(), + DecisionStatus = request.DecisionStatus, + ReasonCode = request.ReasonCode, + ReasonText = request.ReasonText, + EvidenceHashes = request.EvidenceHashes?.ToList() ?? 
new(), + PolicyContext = request.PolicyContext, + ReplayToken = replayToken + }, cancellationToken).ConfigureAwait(false); + + var response = new DecisionResponse + { + DecisionId = decision.Id, + AlertId = decision.AlertId, + ActorId = decision.ActorId, + Timestamp = decision.Timestamp, + ReplayToken = decision.ReplayToken, + EvidenceHashes = decision.EvidenceHashes + }; + + return TypedResults.Created($"/v1/alerts/{alertId}/audit", response); +}) +.WithName("RecordDecision") +.RequireAuthorization(AlertDecidePolicy) +.Produces(StatusCodes.Status201Created) +.Produces(StatusCodes.Status404NotFound) +.ProducesProblem(StatusCodes.Status400BadRequest); + +app.MapGet("/v1/alerts/{alertId}/audit", async Task, NotFound, ProblemHttpResult>> ( + HttpContext httpContext, + string alertId, + IDecisionService decisionService, + CancellationToken cancellationToken) => +{ + if (!TryGetTenant(httpContext, out var tenantProblem, out var tenantId)) + { + return tenantProblem!; + } + + var decisions = await decisionService.GetHistoryAsync(tenantId, alertId, cancellationToken).ConfigureAwait(false); + + var events = decisions.Select(d => new AuditEventResponse + { + EventId = d.Id, + EventType = "decision_recorded", + ActorId = d.ActorId, + Timestamp = d.Timestamp, + Details = new + { + decision_status = d.DecisionStatus, + reason_code = d.ReasonCode, + reason_text = d.ReasonText, + evidence_hashes = d.EvidenceHashes + }, + ReplayToken = d.ReplayToken + }).ToList(); + + var response = new AuditTimelineResponse + { + AlertId = alertId, + Events = events, + TotalCount = events.Count + }; + + return TypedResults.Json(response); +}) +.WithName("GetAlertAudit") +.RequireAuthorization(AlertReadPolicy) +.Produces(StatusCodes.Status200OK) +.Produces(StatusCodes.Status404NotFound) +.ProducesProblem(StatusCodes.Status400BadRequest); + app.MapPost("/v1/vex-consensus/issuers", async Task, ProblemHttpResult>> ( RegisterVexIssuerRequest request, VexConsensusService consensusService, diff --git 
a/src/Findings/StellaOps.Findings.Ledger/Domain/DecisionModels.cs b/src/Findings/StellaOps.Findings.Ledger/Domain/DecisionModels.cs new file mode 100644 index 000000000..3bfd03bb5 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Domain/DecisionModels.cs @@ -0,0 +1,452 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Findings.Ledger.Domain; + +/// +/// Immutable decision event per advisory §11. +/// +public sealed class DecisionEvent +{ + /// + /// Unique identifier for this decision event. + /// + public string Id { get; init; } = Guid.NewGuid().ToString("N"); + + /// + /// Alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// Artifact identifier (image digest/commit hash). + /// + public required string ArtifactId { get; init; } + + /// + /// Actor who made the decision. + /// + public required string ActorId { get; init; } + + /// + /// When the decision was recorded (UTC). + /// + public required DateTimeOffset Timestamp { get; init; } + + /// + /// Decision status: affected, not_affected, under_investigation. + /// + public required string DecisionStatus { get; init; } + + /// + /// Preset reason code. + /// + public required string ReasonCode { get; init; } + + /// + /// Custom reason text. + /// + public string? ReasonText { get; init; } + + /// + /// Content-addressed evidence hashes. + /// + public required List EvidenceHashes { get; init; } + + /// + /// Policy context (ruleset version, policy id). + /// + public string? PolicyContext { get; init; } + + /// + /// Deterministic replay token for reproducibility. + /// + public required string ReplayToken { get; init; } +} + +/// +/// Alert entity for triage. +/// +public sealed class Alert +{ + /// + /// Unique alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// Tenant identifier. + /// + public required string TenantId { get; init; } + + /// + /// Artifact identifier (image digest/commit hash). 
+ /// + public required string ArtifactId { get; init; } + + /// + /// Vulnerability identifier. + /// + public required string VulnId { get; init; } + + /// + /// Affected component PURL. + /// + public string? ComponentPurl { get; init; } + + /// + /// Severity level (critical, high, medium, low). + /// + public required string Severity { get; init; } + + /// + /// Triage band (hot, warm, cold). + /// + public required string Band { get; init; } + + /// + /// Alert status (open, in_review, decided, closed). + /// + public required string Status { get; init; } + + /// + /// Composite triage score. + /// + public double Score { get; init; } + + /// + /// When the alert was created. + /// + public required DateTimeOffset CreatedAt { get; init; } + + /// + /// When the alert was last updated. + /// + public DateTimeOffset? UpdatedAt { get; init; } + + /// + /// Number of decisions recorded for this alert. + /// + public int DecisionCount { get; init; } +} + +/// +/// Evidence bundle for an alert. +/// +public sealed class EvidenceBundle +{ + /// + /// Alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// Reachability evidence. + /// + public EvidenceSection? Reachability { get; init; } + + /// + /// Call stack evidence. + /// + public EvidenceSection? CallStack { get; init; } + + /// + /// Provenance evidence. + /// + public EvidenceSection? Provenance { get; init; } + + /// + /// VEX status evidence. + /// + public VexStatusEvidence? VexStatus { get; init; } + + /// + /// Content-addressed hashes for all evidence. + /// + public required EvidenceHashes Hashes { get; init; } + + /// + /// When the bundle was computed. + /// + public required DateTimeOffset ComputedAt { get; init; } +} + +/// +/// Evidence section with status and proof. +/// +public sealed class EvidenceSection +{ + /// + /// Status: available, loading, unavailable, error. + /// + public required string Status { get; init; } + + /// + /// Content hash for this evidence. 
+ /// + public string? Hash { get; init; } + + /// + /// Proof data (type-specific). + /// + public object? Proof { get; init; } +} + +/// +/// VEX status evidence with history. +/// +public sealed class VexStatusEvidence +{ + /// + /// Status: available, unavailable. + /// + public required string Status { get; init; } + + /// + /// Current VEX statement. + /// + public VexStatement? Current { get; init; } + + /// + /// Historical VEX statements. + /// + public IReadOnlyList? History { get; init; } +} + +/// +/// VEX statement summary. +/// +public sealed class VexStatement +{ + /// + /// Statement identifier. + /// + public required string StatementId { get; init; } + + /// + /// VEX status. + /// + public required string Status { get; init; } + + /// + /// Justification code. + /// + public string? Justification { get; init; } + + /// + /// Impact statement. + /// + public string? ImpactStatement { get; init; } + + /// + /// When the statement was issued. + /// + public required DateTimeOffset Timestamp { get; init; } + + /// + /// Statement issuer. + /// + public string? Issuer { get; init; } +} + +/// +/// Content-addressed hashes for evidence bundle. +/// +public sealed class EvidenceHashes +{ + /// + /// All hashes for the bundle. + /// + public required IReadOnlyList Hashes { get; init; } +} + +/// +/// Audit timeline for an alert. +/// +public sealed class AuditTimeline +{ + /// + /// Alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// List of audit events. + /// + public required IReadOnlyList Events { get; init; } + + /// + /// Total count of events. + /// + public int TotalCount { get; init; } +} + +/// +/// Single audit event. +/// +public sealed class AuditEvent +{ + /// + /// Event identifier. + /// + public required string EventId { get; init; } + + /// + /// Type of audit event. + /// + public required string EventType { get; init; } + + /// + /// Actor who triggered the event. 
+ /// + public required string ActorId { get; init; } + + /// + /// When the event occurred. + /// + public required DateTimeOffset Timestamp { get; init; } + + /// + /// Event-specific details. + /// + public object? Details { get; init; } + + /// + /// Replay token if applicable. + /// + public string? ReplayToken { get; init; } +} + +/// +/// Alert diff result. +/// +public sealed class AlertDiff +{ + /// + /// Alert identifier. + /// + public required string AlertId { get; init; } + + /// + /// Baseline scan identifier. + /// + public string? BaselineScanId { get; init; } + + /// + /// Current scan identifier. + /// + public required string CurrentScanId { get; init; } + + /// + /// SBOM diff summary. + /// + public SbomDiff? SbomDiff { get; init; } + + /// + /// VEX diff summary. + /// + public VexDiff? VexDiff { get; init; } + + /// + /// When the diff was computed. + /// + public required DateTimeOffset ComputedAt { get; init; } +} + +/// +/// SBOM diff summary. +/// +public sealed class SbomDiff +{ + /// + /// Number of added components. + /// + public int AddedComponents { get; init; } + + /// + /// Number of removed components. + /// + public int RemovedComponents { get; init; } + + /// + /// Number of changed components. + /// + public int ChangedComponents { get; init; } + + /// + /// Detailed changes. + /// + public IReadOnlyList? Changes { get; init; } +} + +/// +/// Single component diff. +/// +public sealed class ComponentDiff +{ + /// + /// Component PURL. + /// + public required string Purl { get; init; } + + /// + /// Type of change: added, removed, changed. + /// + public required string ChangeType { get; init; } + + /// + /// Old version if changed/removed. + /// + public string? OldVersion { get; init; } + + /// + /// New version if changed/added. + /// + public string? NewVersion { get; init; } +} + +/// +/// VEX diff summary. +/// +public sealed class VexDiff +{ + /// + /// Number of status changes. 
+ /// + public int StatusChanges { get; init; } + + /// + /// Number of new statements. + /// + public int NewStatements { get; init; } + + /// + /// Detailed changes. + /// + public IReadOnlyList? Changes { get; init; } +} + +/// +/// Single VEX status diff. +/// +public sealed class VexStatusDiff +{ + /// + /// Vulnerability identifier. + /// + public required string VulnId { get; init; } + + /// + /// Old status. + /// + public string? OldStatus { get; init; } + + /// + /// New status. + /// + public required string NewStatus { get; init; } + + /// + /// When the change occurred. + /// + public required DateTimeOffset Timestamp { get; init; } +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/AlertService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/AlertService.cs new file mode 100644 index 000000000..51da8d72f --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/AlertService.cs @@ -0,0 +1,163 @@ +using Microsoft.Extensions.Logging; +using StellaOps.Findings.Ledger.Domain; +using StellaOps.Findings.Ledger.Infrastructure; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for alert operations, wrapping the scored findings query system. +/// +public sealed class AlertService : IAlertService +{ + private readonly IScoredFindingsQueryService _queryService; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public AlertService( + IScoredFindingsQueryService queryService, + TimeProvider timeProvider, + ILogger logger) + { + _queryService = queryService ?? throw new ArgumentNullException(nameof(queryService)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + /// Lists alerts with filtering and pagination. 
+ /// + public async Task ListAsync( + string tenantId, + AlertFilterOptions filter, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + + _logger.LogDebug( + "Listing alerts for tenant {TenantId} with filter: band={Band}, severity={Severity}, status={Status}", + tenantId, filter.Band, filter.Severity, filter.Status); + + // Convert band filter to score range + var (minScore, maxScore) = GetScoreRangeForBand(filter.Band); + + // Build query + // NOTE(review): filter.ArtifactId, filter.VulnId, filter.ComponentPurl and filter.Offset + // are accepted by AlertFilterOptions (and exposed as query parameters by the endpoint) + // but are never applied here — the ScoredFindingsQuery below carries no corresponding + // fields, so those filters are silently ignored and offset-based paging never advances. + // Wire them through (or document the limitation at the API surface) — TODO confirm + // whether ScoredFindingsQuery supports them. + var query = new ScoredFindingsQuery + { + TenantId = tenantId, + MinScore = minScore, + MaxScore = maxScore, + Severities = !string.IsNullOrWhiteSpace(filter.Severity) ? new[] { filter.Severity } : null, + Statuses = !string.IsNullOrWhiteSpace(filter.Status) ? new[] { filter.Status } : null, + Limit = filter.Limit, + Descending = filter.SortDescending, + SortBy = MapSortField(filter.SortBy) + }; + + var result = await _queryService.QueryAsync(query, cancellationToken).ConfigureAwait(false); + + // Map findings to alerts + var alerts = result.Findings.Select(f => MapToAlert(f)).ToList(); + + _logger.LogInformation( + "Found {Count} alerts for tenant {TenantId} (total: {Total})", + alerts.Count, tenantId, result.TotalCount); + + return new AlertListResult(alerts, result.TotalCount, result.NextCursor); + } + + /// + /// Gets a specific alert by ID. 
+ /// + public async Task GetAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(alertId); + + _logger.LogDebug("Getting alert {AlertId} for tenant {TenantId}", alertId, tenantId); + + // Query for the specific finding + // BUG(review): Limit = 1 fetches only the single top-ranked finding for the tenant, and + // the FirstOrDefault below then filters that one row client-side by FindingId. Any alert + // that is not the first result of the default sort will be reported as not found (the + // RecordDecision endpoint relies on this lookup, so decisions on such alerts 404). Needs + // a FindingId filter on ScoredFindingsQuery or cursor-driven paging over all findings. + var query = new ScoredFindingsQuery + { + TenantId = tenantId, + Limit = 1 + }; + + var result = await _queryService.QueryAsync(query, cancellationToken).ConfigureAwait(false); + var finding = result.Findings.FirstOrDefault(f => f.FindingId == alertId); + + if (finding is null) + { + _logger.LogDebug("Alert {AlertId} not found for tenant {TenantId}", alertId, tenantId); + return null; + } + + return MapToAlert(finding); + } + + private static Alert MapToAlert(ScoredFinding finding) + { + // Compute band based on risk score + var score = finding.RiskScore.HasValue ? (double)finding.RiskScore.Value : 0.0; + var band = ComputeBand(score); + + // Parse finding ID to extract components (format: tenantId|artifactId|vulnId) + // NOTE(review): assumes FindingId is pipe-delimited; if any segment can itself contain + // '|' this mis-parses — TODO confirm the id format against the producer. + var parts = finding.FindingId.Split('|'); + var artifactId = parts.Length > 1 ? parts[1] : "unknown"; + var vulnId = parts.Length > 2 ? parts[2] : "unknown"; + + return new Alert + { + AlertId = finding.FindingId, + TenantId = finding.TenantId, + ArtifactId = artifactId, + VulnId = vulnId, + ComponentPurl = null, // Not available in ScoredFinding + Severity = finding.RiskSeverity ?? "unknown", + Band = band, + Status = finding.Status ?? "open", + Score = score, + CreatedAt = finding.UpdatedAt, + UpdatedAt = finding.UpdatedAt, + DecisionCount = 0 // Would need additional query + }; + } + + private static string ComputeBand(double score) + { + // Compute band based on score thresholds + // Hot: score >= 0.70 + // Warm: 0.40 <= score < 0.70 + // Cold: score < 0.40 + return score switch + { + >= 0.70 => "hot", + >= 0.40 => "warm", + _ => "cold" + }; + } + + private static (decimal? 
MinScore, decimal? MaxScore) GetScoreRangeForBand(string? band) + { + return band?.ToLowerInvariant() switch + { + "hot" => (0.70m, null), + "warm" => (0.40m, 0.70m), + "cold" => (null, 0.40m), + _ => (null, null) + }; + } + + private static ScoredFindingsSortField MapSortField(string? sortBy) + { + return sortBy?.ToLowerInvariant() switch + { + "severity" => ScoredFindingsSortField.RiskSeverity, + "updated" => ScoredFindingsSortField.UpdatedAt, + "score" => ScoredFindingsSortField.RiskScore, + _ => ScoredFindingsSortField.RiskScore + }; + } +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/DecisionService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/DecisionService.cs new file mode 100644 index 000000000..2381c9508 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/DecisionService.cs @@ -0,0 +1,162 @@ +using System.Text.Json; +using System.Text.Json.Nodes; +using Microsoft.Extensions.Logging; +using StellaOps.Findings.Ledger.Domain; +using StellaOps.Findings.Ledger.Hashing; +using StellaOps.Findings.Ledger.Infrastructure; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for recording and querying triage decisions. +/// +public sealed class DecisionService : IDecisionService +{ + private readonly ILedgerEventWriteService _writeService; + private readonly ILedgerEventRepository _repository; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + private static readonly string[] ValidStatuses = { "affected", "not_affected", "under_investigation" }; + + public DecisionService( + ILedgerEventWriteService writeService, + ILedgerEventRepository repository, + TimeProvider timeProvider, + ILogger logger) + { + _writeService = writeService ?? throw new ArgumentNullException(nameof(writeService)); + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _timeProvider = timeProvider ?? 
throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + /// Records a decision event (append-only, immutable). + /// + public async Task RecordAsync( + DecisionEvent decision, + CancellationToken cancellationToken) + { + // Validate decision + ValidateDecision(decision); + + var now = _timeProvider.GetUtcNow(); + var tenantId = GetTenantIdFromAlert(decision.AlertId); + var chainId = LedgerChainIdGenerator.FromTenantSubject(tenantId, decision.AlertId); + var eventId = Guid.NewGuid(); + + // Build payload + var payload = new JsonObject + { + ["decision_id"] = decision.Id, + ["alert_id"] = decision.AlertId, + ["artifact_id"] = decision.ArtifactId, + ["decision_status"] = decision.DecisionStatus, + ["reason_code"] = decision.ReasonCode, + ["replay_token"] = decision.ReplayToken + }; + + if (decision.ReasonText is not null) + { + payload["reason_text"] = decision.ReasonText; + } + + if (decision.EvidenceHashes?.Count > 0) + { + var hashArray = new JsonArray(); + foreach (var hash in decision.EvidenceHashes) + { + hashArray.Add(hash); + } + payload["evidence_hashes"] = hashArray; + } + + if (decision.PolicyContext is not null) + { + payload["policy_context"] = decision.PolicyContext; + } + + // Create canonical envelope + var canonicalEnvelope = LedgerCanonicalJsonSerializer.Canonicalize(payload); + + // Create draft event using the "finding.status_changed" event type + // as decisions represent status transitions + var draft = new LedgerEventDraft( + TenantId: tenantId, + ChainId: chainId, + SequenceNumber: 0, // Will be determined by write service + EventId: eventId, + EventType: LedgerEventConstants.EventFindingStatusChanged, + PolicyVersion: "1.0.0", + FindingId: decision.AlertId, + ArtifactId: decision.ArtifactId, + SourceRunId: null, + ActorId: decision.ActorId, + ActorType: "operator", + OccurredAt: decision.Timestamp, + RecordedAt: now, + Payload: payload, + CanonicalEnvelope: 
canonicalEnvelope, + ProvidedPreviousHash: null, + EvidenceBundleReference: null); + + var result = await _writeService.AppendAsync(draft, cancellationToken).ConfigureAwait(false); + if (result.Status != LedgerWriteStatus.Success && result.Status != LedgerWriteStatus.Idempotent) + { + throw new InvalidOperationException($"Failed to record decision: {string.Join(", ", result.Errors)}"); + } + + _logger.LogInformation( + "Decision {DecisionId} recorded for alert {AlertId}: {Status}", + decision.Id, decision.AlertId, decision.DecisionStatus); + + return decision; + } + + /// + /// Gets decision history for an alert (immutable timeline). + /// + public async Task> GetHistoryAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken) + { + // Decision history would need to be fetched from projections + // or by querying events for the alert's chain. + // For now, return empty list as the full implementation requires + // additional repository support. + _logger.LogInformation( + "Getting decision history for alert {AlertId} in tenant {TenantId}", + alertId, tenantId); + + // This would need to be implemented with a projection repository + // or by scanning ledger events for the alert's chain + return Array.Empty(); + } + + private static void ValidateDecision(DecisionEvent decision) + { + if (string.IsNullOrWhiteSpace(decision.AlertId)) + throw new ArgumentException("AlertId is required"); + + if (string.IsNullOrWhiteSpace(decision.DecisionStatus)) + throw new ArgumentException("DecisionStatus is required"); + + if (!ValidStatuses.Contains(decision.DecisionStatus)) + throw new ArgumentException($"Invalid DecisionStatus: {decision.DecisionStatus}"); + + if (string.IsNullOrWhiteSpace(decision.ReasonCode)) + throw new ArgumentException("ReasonCode is required"); + + if (string.IsNullOrWhiteSpace(decision.ReplayToken)) + throw new ArgumentException("ReplayToken is required"); + } + + private static string GetTenantIdFromAlert(string alertId) + { + // 
Extract tenant from alert ID format: tenant|artifact|vuln + var parts = alertId.Split('|'); + return parts.Length > 0 ? parts[0] : "default"; + } +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/IAlertService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/IAlertService.cs new file mode 100644 index 000000000..59d70c203 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/IAlertService.cs @@ -0,0 +1,48 @@ +using StellaOps.Findings.Ledger.Domain; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for alert operations. +/// +public interface IAlertService +{ + /// + /// Lists alerts with filtering and pagination. + /// + Task ListAsync( + string tenantId, + AlertFilterOptions filter, + CancellationToken cancellationToken = default); + + /// + /// Gets a specific alert by ID. + /// + Task GetAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken = default); +} + +/// +/// Filter options for alert listing. +/// +public sealed record AlertFilterOptions( + string? Band = null, + string? Severity = null, + string? Status = null, + string? ArtifactId = null, + string? VulnId = null, + string? ComponentPurl = null, + int Limit = 50, + int Offset = 0, + string? SortBy = null, + bool SortDescending = false); + +/// +/// Result of alert listing. +/// +public sealed record AlertListResult( + IReadOnlyList Items, + int TotalCount, + string? NextPageToken); diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/IAuditService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/IAuditService.cs new file mode 100644 index 000000000..f59eaab13 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/IAuditService.cs @@ -0,0 +1,17 @@ +using StellaOps.Findings.Ledger.Domain; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for audit timeline retrieval. +/// +public interface IAuditService +{ + /// + /// Gets the audit timeline for an alert. 
+ /// + Task GetTimelineAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken = default); +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/IDecisionService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/IDecisionService.cs new file mode 100644 index 000000000..ab3868a30 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/IDecisionService.cs @@ -0,0 +1,24 @@ +using StellaOps.Findings.Ledger.Domain; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for recording and querying triage decisions. +/// +public interface IDecisionService +{ + /// + /// Records a decision event (append-only, immutable). + /// + Task RecordAsync( + DecisionEvent decision, + CancellationToken cancellationToken = default); + + /// + /// Gets decision history for an alert (immutable timeline). + /// + Task> GetHistoryAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken = default); +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/IDiffService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/IDiffService.cs new file mode 100644 index 000000000..31c36308a --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/IDiffService.cs @@ -0,0 +1,18 @@ +using StellaOps.Findings.Ledger.Domain; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for computing SBOM/VEX diffs. +/// +public interface IDiffService +{ + /// + /// Computes a diff for an alert against a baseline. + /// + Task ComputeDiffAsync( + string tenantId, + string alertId, + string? 
baselineScanId, + CancellationToken cancellationToken = default); +} diff --git a/src/Findings/StellaOps.Findings.Ledger/Services/IEvidenceBundleService.cs b/src/Findings/StellaOps.Findings.Ledger/Services/IEvidenceBundleService.cs new file mode 100644 index 000000000..d3721d1a8 --- /dev/null +++ b/src/Findings/StellaOps.Findings.Ledger/Services/IEvidenceBundleService.cs @@ -0,0 +1,17 @@ +using StellaOps.Findings.Ledger.Domain; + +namespace StellaOps.Findings.Ledger.Services; + +/// +/// Service for evidence bundle retrieval. +/// +public interface IEvidenceBundleService +{ + /// + /// Gets the evidence bundle for an alert. + /// + Task GetBundleAsync( + string tenantId, + string alertId, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/CallGraphContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/CallGraphContracts.cs new file mode 100644 index 000000000..c570a7100 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/CallGraphContracts.cs @@ -0,0 +1,69 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +/// +/// Call graph submission request (CallGraphV1 schema). +/// +public sealed record CallGraphV1Dto( + [property: JsonPropertyName("schema")] string Schema, + [property: JsonPropertyName("scanKey")] string ScanKey, + [property: JsonPropertyName("language")] string Language, + [property: JsonPropertyName("nodes")] IReadOnlyList Nodes, + [property: JsonPropertyName("edges")] IReadOnlyList Edges, + [property: JsonPropertyName("artifacts")] IReadOnlyList? Artifacts = null, + [property: JsonPropertyName("entrypoints")] IReadOnlyList? Entrypoints = null); + +/// +/// Artifact in a call graph. +/// +public sealed record CallGraphArtifactDto( + [property: JsonPropertyName("artifactKey")] string ArtifactKey, + [property: JsonPropertyName("kind")] string? Kind = null, + [property: JsonPropertyName("sha256")] string? 
Sha256 = null); + +/// +/// Node in a call graph. +/// +public sealed record CallGraphNodeDto( + [property: JsonPropertyName("nodeId")] string NodeId, + [property: JsonPropertyName("symbolKey")] string SymbolKey, + [property: JsonPropertyName("artifactKey")] string? ArtifactKey = null, + [property: JsonPropertyName("visibility")] string? Visibility = null, + [property: JsonPropertyName("isEntrypointCandidate")] bool IsEntrypointCandidate = false); + +/// +/// Edge in a call graph. +/// +public sealed record CallGraphEdgeDto( + [property: JsonPropertyName("from")] string From, + [property: JsonPropertyName("to")] string To, + [property: JsonPropertyName("kind")] string Kind = "static", + [property: JsonPropertyName("reason")] string? Reason = null, + [property: JsonPropertyName("weight")] double Weight = 1.0); + +/// +/// Entrypoint in a call graph. +/// +public sealed record CallGraphEntrypointDto( + [property: JsonPropertyName("nodeId")] string NodeId, + [property: JsonPropertyName("kind")] string Kind, + [property: JsonPropertyName("route")] string? Route = null, + [property: JsonPropertyName("framework")] string? Framework = null); + +/// +/// Response when call graph is accepted. +/// +public sealed record CallGraphAcceptedResponseDto( + [property: JsonPropertyName("callgraphId")] string CallgraphId, + [property: JsonPropertyName("nodeCount")] int NodeCount, + [property: JsonPropertyName("edgeCount")] int EdgeCount, + [property: JsonPropertyName("digest")] string Digest); + +/// +/// Existing call graph reference (for duplicate detection). 
+/// +public sealed record ExistingCallGraphDto( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("digest")] string Digest, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/ProofSpineContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ProofSpineContracts.cs index 088868364..1182dac32 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Contracts/ProofSpineContracts.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ProofSpineContracts.cs @@ -129,10 +129,22 @@ public sealed record DsseEnvelopeDto public sealed record DsseSignatureDto { [JsonPropertyName("keyid")] + [JsonPropertyOrder(0)] public string KeyId { get; init; } = string.Empty; [JsonPropertyName("sig")] + [JsonPropertyOrder(1)] public string Sig { get; init; } = string.Empty; + + [JsonPropertyName("algorithm")] + [JsonPropertyOrder(2)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Algorithm { get; init; } + + [JsonPropertyName("signature")] + [JsonPropertyOrder(3)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Signature { get; init; } } public sealed record ProofSpineVerificationDto diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReachabilityContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReachabilityContracts.cs new file mode 100644 index 000000000..c26be71d3 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReachabilityContracts.cs @@ -0,0 +1,109 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +/// +/// Request to trigger reachability computation. +/// +public sealed record ComputeReachabilityRequestDto( + [property: JsonPropertyName("forceRecompute")] bool ForceRecompute = false, + [property: JsonPropertyName("entrypoints")] IReadOnlyList? 
Entrypoints = null, + [property: JsonPropertyName("targets")] IReadOnlyList? Targets = null); + +/// +/// Response from triggering reachability computation. +/// +public sealed record ComputeReachabilityResponseDto( + [property: JsonPropertyName("jobId")] string JobId, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("estimatedDuration")] string? EstimatedDuration = null); + +/// +/// Component reachability status. +/// +public sealed record ComponentReachabilityDto( + [property: JsonPropertyName("purl")] string Purl, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("confidence")] double Confidence, + [property: JsonPropertyName("latticeState")] string? LatticeState = null, + [property: JsonPropertyName("why")] IReadOnlyList? Why = null); + +/// +/// List of component reachability results. +/// +public sealed record ComponentReachabilityListDto( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("total")] int Total); + +/// +/// Vulnerability finding with reachability. +/// +public sealed record ReachabilityFindingDto( + [property: JsonPropertyName("cveId")] string CveId, + [property: JsonPropertyName("purl")] string Purl, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("confidence")] double Confidence, + [property: JsonPropertyName("latticeState")] string? LatticeState = null, + [property: JsonPropertyName("severity")] string? Severity = null, + [property: JsonPropertyName("affectedVersions")] string? AffectedVersions = null); + +/// +/// List of reachability findings. +/// +public sealed record ReachabilityFindingListDto( + [property: JsonPropertyName("items")] IReadOnlyList Items, + [property: JsonPropertyName("total")] int Total); + +/// +/// Explanation reason with code and impact. 
+/// +public sealed record ExplanationReasonDto( + [property: JsonPropertyName("code")] string Code, + [property: JsonPropertyName("description")] string Description, + [property: JsonPropertyName("impact")] double? Impact = null); + +/// +/// Static analysis evidence. +/// +public sealed record StaticAnalysisEvidenceDto( + [property: JsonPropertyName("callgraphDigest")] string? CallgraphDigest = null, + [property: JsonPropertyName("pathLength")] int? PathLength = null, + [property: JsonPropertyName("edgeTypes")] IReadOnlyList? EdgeTypes = null); + +/// +/// Runtime evidence. +/// +public sealed record RuntimeEvidenceDto( + [property: JsonPropertyName("observed")] bool Observed, + [property: JsonPropertyName("hitCount")] int HitCount = 0, + [property: JsonPropertyName("lastObserved")] DateTimeOffset? LastObserved = null); + +/// +/// Policy evaluation result. +/// +public sealed record PolicyEvaluationEvidenceDto( + [property: JsonPropertyName("policyDigest")] string? PolicyDigest = null, + [property: JsonPropertyName("verdict")] string? Verdict = null, + [property: JsonPropertyName("verdictReason")] string? VerdictReason = null); + +/// +/// Evidence chain for explanation. +/// +public sealed record EvidenceChainDto( + [property: JsonPropertyName("staticAnalysis")] StaticAnalysisEvidenceDto? StaticAnalysis = null, + [property: JsonPropertyName("runtimeEvidence")] RuntimeEvidenceDto? RuntimeEvidence = null, + [property: JsonPropertyName("policyEvaluation")] PolicyEvaluationEvidenceDto? PolicyEvaluation = null); + +/// +/// Full reachability explanation. +/// +public sealed record ReachabilityExplanationDto( + [property: JsonPropertyName("cveId")] string CveId, + [property: JsonPropertyName("purl")] string Purl, + [property: JsonPropertyName("status")] string Status, + [property: JsonPropertyName("confidence")] double Confidence, + [property: JsonPropertyName("latticeState")] string? LatticeState = null, + [property: JsonPropertyName("pathWitness")] IReadOnlyList? 
PathWitness = null, + [property: JsonPropertyName("why")] IReadOnlyList? Why = null, + [property: JsonPropertyName("evidence")] EvidenceChainDto? Evidence = null, + [property: JsonPropertyName("spineId")] string? SpineId = null); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs index c3b233014..3904a1146 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs @@ -102,30 +102,3 @@ public sealed record ReportSummaryDto [JsonPropertyOrder(4)] public int Quieted { get; init; } } - -public sealed record DsseEnvelopeDto -{ - [JsonPropertyName("payloadType")] - [JsonPropertyOrder(0)] - public string PayloadType { get; init; } = string.Empty; - - [JsonPropertyName("payload")] - [JsonPropertyOrder(1)] - public string Payload { get; init; } = string.Empty; - - [JsonPropertyName("signatures")] - [JsonPropertyOrder(2)] - public IReadOnlyList Signatures { get; init; } = Array.Empty(); -} - -public sealed record DsseSignatureDto -{ - [JsonPropertyName("keyId")] - public string KeyId { get; init; } = string.Empty; - - [JsonPropertyName("algorithm")] - public string Algorithm { get; init; } = string.Empty; - - [JsonPropertyName("signature")] - public string Signature { get; init; } = string.Empty; -} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Contracts/SbomContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/SbomContracts.cs new file mode 100644 index 000000000..aafdd9b51 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/SbomContracts.cs @@ -0,0 +1,21 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +/// +/// Response when SBOM is accepted. 
+/// +public sealed record SbomAcceptedResponseDto( + [property: JsonPropertyName("sbomId")] string SbomId, + [property: JsonPropertyName("format")] string Format, + [property: JsonPropertyName("componentCount")] int ComponentCount, + [property: JsonPropertyName("digest")] string Digest); + +/// +/// SBOM format types. +/// +public static class SbomFormats +{ + public const string CycloneDx = "cyclonedx"; + public const string Spdx = "spdx"; +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/CallGraphEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/CallGraphEndpoints.cs new file mode 100644 index 000000000..23af258cb --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/CallGraphEndpoints.cs @@ -0,0 +1,244 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class CallGraphEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + private static readonly HashSet SupportedLanguages = new(StringComparer.OrdinalIgnoreCase) + { + "dotnet", "java", "node", "python", "go", "rust", "binary", "ruby", "php" + }; + + public static void MapCallGraphEndpoints(this RouteGroupBuilder scansGroup) + { + ArgumentNullException.ThrowIfNull(scansGroup); + + // POST /scans/{scanId}/callgraphs + scansGroup.MapPost("/{scanId}/callgraphs", HandleSubmitCallGraphAsync) + .WithName("scanner.scans.callgraphs.submit") + 
.WithTags("CallGraphs") + .Produces(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status409Conflict) + .Produces(StatusCodes.Status413PayloadTooLarge) + .RequireAuthorization(ScannerPolicies.CallGraphIngest); + } + + private static async Task HandleSubmitCallGraphAsync( + string scanId, + CallGraphV1Dto request, + IScanCoordinator coordinator, + ICallGraphIngestionService ingestionService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(ingestionService); + ArgumentNullException.ThrowIfNull(request); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + // Validate Content-Digest header for idempotency + var contentDigest = context.Request.Headers["Content-Digest"].FirstOrDefault(); + if (string.IsNullOrWhiteSpace(contentDigest)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Missing Content-Digest header", + StatusCodes.Status400BadRequest, + detail: "Content-Digest header is required for idempotent call graph submission."); + } + + // Verify scan exists + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + // Validate call graph schema + var validationResult = ValidateCallGraph(request); + if (!validationResult.IsValid) + { + var extensions = new Dictionary + { + ["errors"] = validationResult.Errors + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid 
call graph", + StatusCodes.Status400BadRequest, + detail: "Call graph validation failed.", + extensions: extensions); + } + + // Check for duplicate submission (idempotency) + var existing = await ingestionService.FindByDigestAsync(parsed, contentDigest, cancellationToken) + .ConfigureAwait(false); + + if (existing is not null) + { + var conflictExtensions = new Dictionary + { + ["callgraphId"] = existing.Id, + ["digest"] = existing.Digest + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Conflict, + "Duplicate call graph", + StatusCodes.Status409Conflict, + detail: "Call graph with this Content-Digest already submitted.", + extensions: conflictExtensions); + } + + // Ingest the call graph + var result = await ingestionService.IngestAsync(parsed, request, contentDigest, cancellationToken) + .ConfigureAwait(false); + + var response = new CallGraphAcceptedResponseDto( + CallgraphId: result.CallgraphId, + NodeCount: result.NodeCount, + EdgeCount: result.EdgeCount, + Digest: result.Digest); + + context.Response.Headers.Location = $"/api/scans/{scanId}/callgraphs/{result.CallgraphId}"; + return Json(response, StatusCodes.Status202Accepted); + } + + private static CallGraphValidationResult ValidateCallGraph(CallGraphV1Dto callGraph) + { + var errors = new List(); + + // Validate schema version + if (string.IsNullOrWhiteSpace(callGraph.Schema)) + { + errors.Add("Schema version is required."); + } + else if (!string.Equals(callGraph.Schema, "stella.callgraph.v1", StringComparison.Ordinal)) + { + errors.Add($"Unsupported schema '{callGraph.Schema}'. Expected 'stella.callgraph.v1'."); + } + + // Validate scan key + if (string.IsNullOrWhiteSpace(callGraph.ScanKey)) + { + errors.Add("ScanKey is required."); + } + + // Validate language + if (string.IsNullOrWhiteSpace(callGraph.Language)) + { + errors.Add("Language is required."); + } + else if (!SupportedLanguages.Contains(callGraph.Language)) + { + errors.Add($"Unsupported language '{callGraph.Language}'. 
Supported: {string.Join(", ", SupportedLanguages)}."); + } + + // Validate nodes + if (callGraph.Nodes is null || callGraph.Nodes.Count == 0) + { + errors.Add("At least one node is required."); + } + else + { + var nodeIds = new HashSet(StringComparer.Ordinal); + for (var i = 0; i < callGraph.Nodes.Count; i++) + { + var node = callGraph.Nodes[i]; + if (string.IsNullOrWhiteSpace(node.NodeId)) + { + errors.Add($"nodes[{i}].nodeId is required."); + } + else if (!nodeIds.Add(node.NodeId)) + { + errors.Add($"Duplicate nodeId '{node.NodeId}'."); + } + + if (string.IsNullOrWhiteSpace(node.SymbolKey)) + { + errors.Add($"nodes[{i}].symbolKey is required."); + } + } + } + + // Validate edges + if (callGraph.Edges is null || callGraph.Edges.Count == 0) + { + errors.Add("At least one edge is required."); + } + else + { + var nodeIds = callGraph.Nodes? + .Where(n => !string.IsNullOrWhiteSpace(n.NodeId)) + .Select(n => n.NodeId) + .ToHashSet(StringComparer.Ordinal) ?? new HashSet(); + + for (var i = 0; i < callGraph.Edges.Count; i++) + { + var edge = callGraph.Edges[i]; + if (string.IsNullOrWhiteSpace(edge.From)) + { + errors.Add($"edges[{i}].from is required."); + } + else if (nodeIds.Count > 0 && !nodeIds.Contains(edge.From)) + { + errors.Add($"edges[{i}].from references unknown node '{edge.From}'."); + } + + if (string.IsNullOrWhiteSpace(edge.To)) + { + errors.Add($"edges[{i}].to is required."); + } + else if (nodeIds.Count > 0 && !nodeIds.Contains(edge.To)) + { + errors.Add($"edges[{i}].to references unknown node '{edge.To}'."); + } + } + } + + return errors.Count > 0 + ? 
CallGraphValidationResult.Failure(errors.ToArray()) + : CallGraphValidationResult.Success(); + } + + private static IResult Json(T value, int statusCode) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode); + } +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ExportEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ExportEndpoints.cs new file mode 100644 index 000000000..0eafc8390 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ExportEndpoints.cs @@ -0,0 +1,188 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class ExportEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() }, + WriteIndented = true + }; + + public static void MapExportEndpoints(this RouteGroupBuilder scansGroup) + { + ArgumentNullException.ThrowIfNull(scansGroup); + + // GET /scans/{scanId}/exports/sarif + scansGroup.MapGet("/{scanId}/exports/sarif", HandleExportSarifAsync) + .WithName("scanner.scans.exports.sarif") + .WithTags("Exports") + .Produces(StatusCodes.Status200OK, contentType: "application/sarif+json") + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + + // GET /scans/{scanId}/exports/cdxr + scansGroup.MapGet("/{scanId}/exports/cdxr", HandleExportCycloneDxRAsync) + .WithName("scanner.scans.exports.cdxr") + 
.WithTags("Exports") + .Produces(StatusCodes.Status200OK, contentType: "application/vnd.cyclonedx+json") + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + + // GET /scans/{scanId}/exports/openvex + scansGroup.MapGet("/{scanId}/exports/openvex", HandleExportOpenVexAsync) + .WithName("scanner.scans.exports.openvex") + .WithTags("Exports") + .Produces(StatusCodes.Status200OK, contentType: "application/json") + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + } + + private static async Task HandleExportSarifAsync( + string scanId, + IScanCoordinator coordinator, + ISarifExportService exportService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(exportService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var sarifDocument = await exportService.ExportAsync(parsed, cancellationToken).ConfigureAwait(false); + if (sarifDocument is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "No findings available", + StatusCodes.Status404NotFound, + detail: "No findings available for SARIF export."); + } + + var json = JsonSerializer.Serialize(sarifDocument, SerializerOptions); + return Results.Content(json, "application/sarif+json", System.Text.Encoding.UTF8, StatusCodes.Status200OK); + } + + private static async Task HandleExportCycloneDxRAsync( + string 
scanId, + IScanCoordinator coordinator, + ICycloneDxExportService exportService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(exportService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var cdxDocument = await exportService.ExportWithReachabilityAsync(parsed, cancellationToken).ConfigureAwait(false); + if (cdxDocument is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "No findings available", + StatusCodes.Status404NotFound, + detail: "No findings available for CycloneDX export."); + } + + var json = JsonSerializer.Serialize(cdxDocument, SerializerOptions); + return Results.Content(json, "application/vnd.cyclonedx+json", System.Text.Encoding.UTF8, StatusCodes.Status200OK); + } + + private static async Task HandleExportOpenVexAsync( + string scanId, + IScanCoordinator coordinator, + IOpenVexExportService exportService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(exportService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if 
(snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var vexDocument = await exportService.ExportAsync(parsed, cancellationToken).ConfigureAwait(false); + if (vexDocument is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "No VEX data available", + StatusCodes.Status404NotFound, + detail: "No VEX data available for export."); + } + + var json = JsonSerializer.Serialize(vexDocument, SerializerOptions); + return Results.Content(json, "application/json", System.Text.Encoding.UTF8, StatusCodes.Status200OK); + } +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs index 9599937b2..fa268610b 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs @@ -544,7 +544,7 @@ internal static class PolicyEndpoints Rekor = rekor, BuildIds = decision.BuildIds is { Count: > 0 } ? decision.BuildIds.ToArray() : null, Metadata = decision.Metadata is { Count: > 0 } - ? new ReadOnlyDictionary(decision.Metadata.ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.Ordinal)) + ? new ReadOnlyDictionary(decision.Metadata.ToDictionary(kv => kv.Key, kv => kv.Value?.ToString() ?? 
string.Empty, StringComparer.Ordinal)) : null }; } diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ReachabilityEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ReachabilityEndpoints.cs new file mode 100644 index 000000000..45abe28da --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ReachabilityEndpoints.cs @@ -0,0 +1,320 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class ReachabilityEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + public static void MapReachabilityEndpoints(this RouteGroupBuilder scansGroup) + { + ArgumentNullException.ThrowIfNull(scansGroup); + + // POST /scans/{scanId}/compute-reachability + scansGroup.MapPost("/{scanId}/compute-reachability", HandleComputeReachabilityAsync) + .WithName("scanner.scans.compute-reachability") + .WithTags("Reachability") + .Produces(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status404NotFound) + .Produces(StatusCodes.Status409Conflict) + .RequireAuthorization(ScannerPolicies.ScansWrite); + + // GET /scans/{scanId}/reachability/components + scansGroup.MapGet("/{scanId}/reachability/components", HandleGetComponentsAsync) + .WithName("scanner.scans.reachability.components") + .WithTags("Reachability") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + 
.RequireAuthorization(ScannerPolicies.ScansRead); + + // GET /scans/{scanId}/reachability/findings + scansGroup.MapGet("/{scanId}/reachability/findings", HandleGetFindingsAsync) + .WithName("scanner.scans.reachability.findings") + .WithTags("Reachability") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + + // GET /scans/{scanId}/reachability/explain + scansGroup.MapGet("/{scanId}/reachability/explain", HandleExplainAsync) + .WithName("scanner.scans.reachability.explain") + .WithTags("Reachability") + .Produces(StatusCodes.Status200OK) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansRead); + } + + private static async Task HandleComputeReachabilityAsync( + string scanId, + ComputeReachabilityRequestDto? request, + IScanCoordinator coordinator, + IReachabilityComputeService computeService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(computeService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var jobResult = await computeService.TriggerComputeAsync( + parsed, + request?.ForceRecompute ?? 
false, + request?.Entrypoints, + request?.Targets, + cancellationToken).ConfigureAwait(false); + + if (jobResult.AlreadyInProgress) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Conflict, + "Computation already in progress", + StatusCodes.Status409Conflict, + detail: $"Reachability computation already running for scan {scanId}."); + } + + var response = new ComputeReachabilityResponseDto( + JobId: jobResult.JobId, + Status: jobResult.Status, + EstimatedDuration: jobResult.EstimatedDuration); + + return Json(response, StatusCodes.Status202Accepted); + } + + private static async Task HandleGetComponentsAsync( + string scanId, + string? purl, + string? status, + IScanCoordinator coordinator, + IReachabilityQueryService queryService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(queryService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var components = await queryService.GetComponentsAsync( + parsed, + purl, + status, + cancellationToken).ConfigureAwait(false); + + var items = components + .Select(c => new ComponentReachabilityDto( + c.Purl, + c.Status, + c.Confidence, + c.LatticeState, + c.Why)) + .ToList(); + + var response = new ComponentReachabilityListDto(items, items.Count); + return Json(response, StatusCodes.Status200OK); + } + + private static async Task HandleGetFindingsAsync( + string scanId, + string? cve, + string? 
status, + IScanCoordinator coordinator, + IReachabilityQueryService queryService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(queryService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var findings = await queryService.GetFindingsAsync( + parsed, + cve, + status, + cancellationToken).ConfigureAwait(false); + + var items = findings + .Select(f => new ReachabilityFindingDto( + f.CveId, + f.Purl, + f.Status, + f.Confidence, + f.LatticeState, + f.Severity, + f.AffectedVersions)) + .ToList(); + + var response = new ReachabilityFindingListDto(items, items.Count); + return Json(response, StatusCodes.Status200OK); + } + + private static async Task HandleExplainAsync( + string scanId, + string? cve, + string? 
purl, + IScanCoordinator coordinator, + IReachabilityExplainService explainService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(explainService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + if (string.IsNullOrWhiteSpace(cve) || string.IsNullOrWhiteSpace(purl)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Missing required parameters", + StatusCodes.Status400BadRequest, + detail: "Both 'cve' and 'purl' query parameters are required."); + } + + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + var explanation = await explainService.ExplainAsync( + parsed, + cve.Trim(), + purl.Trim(), + cancellationToken).ConfigureAwait(false); + + if (explanation is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Explanation not found", + StatusCodes.Status404NotFound, + detail: $"No reachability data for CVE {cve} and PURL {purl}."); + } + + var response = new ReachabilityExplanationDto( + CveId: explanation.CveId, + Purl: explanation.Purl, + Status: explanation.Status, + Confidence: explanation.Confidence, + LatticeState: explanation.LatticeState, + PathWitness: explanation.PathWitness, + Why: explanation.Why? + .Select(r => new ExplanationReasonDto(r.Code, r.Description, r.Impact)) + .ToList(), + Evidence: explanation.Evidence is null ? null : new EvidenceChainDto( + StaticAnalysis: explanation.Evidence.StaticAnalysis is null ? 
null : + new StaticAnalysisEvidenceDto( + explanation.Evidence.StaticAnalysis.CallgraphDigest, + explanation.Evidence.StaticAnalysis.PathLength, + explanation.Evidence.StaticAnalysis.EdgeTypes), + RuntimeEvidence: explanation.Evidence.RuntimeEvidence is null ? null : + new RuntimeEvidenceDto( + explanation.Evidence.RuntimeEvidence.Observed, + explanation.Evidence.RuntimeEvidence.HitCount, + explanation.Evidence.RuntimeEvidence.LastObserved), + PolicyEvaluation: explanation.Evidence.PolicyEvaluation is null ? null : + new PolicyEvaluationEvidenceDto( + explanation.Evidence.PolicyEvaluation.PolicyDigest, + explanation.Evidence.PolicyEvaluation.Verdict, + explanation.Evidence.PolicyEvaluation.VerdictReason)), + SpineId: explanation.SpineId); + + return Json(response, StatusCodes.Status200OK); + } + + private static IResult Json(T value, int statusCode) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode); + } +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/SbomEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/SbomEndpoints.cs new file mode 100644 index 000000000..f9a8c3e7c --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/SbomEndpoints.cs @@ -0,0 +1,169 @@ +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class SbomEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = 
JsonIgnoreCondition.WhenWritingNull, + Converters = { new JsonStringEnumConverter() } + }; + + public static void MapSbomEndpoints(this RouteGroupBuilder scansGroup) + { + ArgumentNullException.ThrowIfNull(scansGroup); + + // POST /scans/{scanId}/sbom + scansGroup.MapPost("/{scanId}/sbom", HandleSubmitSbomAsync) + .WithName("scanner.scans.sbom.submit") + .WithTags("Scans") + .Accepts("application/vnd.cyclonedx+json", "application/spdx+json", "application/json") + .Produces(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status404NotFound) + .RequireAuthorization(ScannerPolicies.ScansWrite); + } + + private static async Task HandleSubmitSbomAsync( + string scanId, + IScanCoordinator coordinator, + ISbomIngestionService ingestionService, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(coordinator); + ArgumentNullException.ThrowIfNull(ingestionService); + + if (!ScanId.TryParse(scanId, out var parsed)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid scan identifier", + StatusCodes.Status400BadRequest, + detail: "Scan identifier is required."); + } + + // Verify scan exists + var snapshot = await coordinator.GetAsync(parsed, cancellationToken).ConfigureAwait(false); + if (snapshot is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.NotFound, + "Scan not found", + StatusCodes.Status404NotFound, + detail: "Requested scan could not be located."); + } + + // Parse JSON body + JsonDocument sbomDocument; + try + { + sbomDocument = await JsonDocument.ParseAsync( + context.Request.Body, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid JSON", + StatusCodes.Status400BadRequest, + detail: $"Failed to parse SBOM JSON: {ex.Message}"); + } + + // Detect format from Content-Type 
or document structure + var contentType = context.Request.ContentType ?? "application/json"; + var format = DetectSbomFormat(contentType, sbomDocument, ingestionService); + + if (format is null) + { + sbomDocument.Dispose(); + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Unknown SBOM format", + StatusCodes.Status400BadRequest, + detail: "Could not detect SBOM format. Use Content-Type 'application/vnd.cyclonedx+json' or 'application/spdx+json'."); + } + + // Validate the SBOM + var validationResult = ingestionService.Validate(sbomDocument, format); + if (!validationResult.IsValid) + { + sbomDocument.Dispose(); + var extensions = new Dictionary + { + ["errors"] = validationResult.Errors + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid SBOM", + StatusCodes.Status400BadRequest, + detail: "SBOM validation failed.", + extensions: extensions); + } + + // Optional Content-Digest for idempotency + var contentDigest = context.Request.Headers["Content-Digest"].FirstOrDefault(); + + // Ingest the SBOM + var result = await ingestionService.IngestAsync( + parsed, + sbomDocument, + format, + contentDigest, + cancellationToken).ConfigureAwait(false); + + sbomDocument.Dispose(); + + var response = new SbomAcceptedResponseDto( + SbomId: result.SbomId, + Format: result.Format, + ComponentCount: result.ComponentCount, + Digest: result.Digest); + + context.Response.Headers.Location = $"/api/scans/{scanId}/sbom/{result.SbomId}"; + return Json(response, StatusCodes.Status202Accepted); + } + + private static string? 
DetectSbomFormat( + string contentType, + JsonDocument document, + ISbomIngestionService ingestionService) + { + // Check Content-Type first + if (contentType.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase)) + { + return SbomFormats.CycloneDx; + } + + if (contentType.Contains("spdx", StringComparison.OrdinalIgnoreCase)) + { + return SbomFormats.Spdx; + } + + // Fall back to document structure detection + return ingestionService.DetectFormat(document); + } + + private static IResult Json(T value, int statusCode) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode); + } +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs index c5568e586..c990ef941 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs @@ -78,6 +78,12 @@ internal static class ScanEndpoints .Produces(StatusCodes.Status200OK) .Produces(StatusCodes.Status404NotFound) .RequireAuthorization(ScannerPolicies.ScansRead); + + // Register additional scan-related endpoints + scans.MapCallGraphEndpoints(); + scans.MapSbomEndpoints(); + scans.MapReachabilityEndpoints(); + scans.MapExportEndpoints(); } private static async Task HandleSubmitAsync( @@ -125,7 +131,7 @@ internal static class ScanEndpoints } var target = new ScanTarget(reference, digest).Normalize(); - var metadata = NormalizeMetadata(request.Metadata); + var metadata = NormalizeMetadataAsDictionary(request.Metadata); var determinism = options.Value?.Determinism ?? 
new ScannerWebServiceOptions.DeterminismOptions(); if (!string.IsNullOrWhiteSpace(determinism.FeedSnapshotId) && !metadata.ContainsKey("determinism.feed")) @@ -562,10 +568,13 @@ internal static class ScanEndpoints } private static IReadOnlyDictionary NormalizeMetadata(IDictionary metadata) + => NormalizeMetadataAsDictionary(metadata); + + private static Dictionary NormalizeMetadataAsDictionary(IDictionary? metadata) { if (metadata is null || metadata.Count == 0) { - return new Dictionary(); + return new Dictionary(StringComparer.OrdinalIgnoreCase); } var normalized = new Dictionary(StringComparer.OrdinalIgnoreCase); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Program.cs b/src/Scanner/StellaOps.Scanner.WebService/Program.cs index 9d8aeb89a..63d582a67 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Program.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Program.cs @@ -32,14 +32,11 @@ using StellaOps.Scanner.WebService.Endpoints; using StellaOps.Scanner.WebService.Extensions; using StellaOps.Scanner.WebService.Hosting; using StellaOps.Scanner.WebService.Options; -using StellaOps.Scanner.WebService.Options; using StellaOps.Scanner.WebService.Services; using StellaOps.Scanner.WebService.Security; using StellaOps.Scanner.WebService.Replay; using StellaOps.Scanner.Storage; using StellaOps.Scanner.Storage.Extensions; -using StellaOps.Scanner.WebService.Endpoints; -using StellaOps.Scanner.WebService.Options; var builder = WebApplication.CreateBuilder(args); diff --git a/src/Scanner/StellaOps.Scanner.WebService/Replay/RecordModeService.cs b/src/Scanner/StellaOps.Scanner.WebService/Replay/RecordModeService.cs index 63ea3afed..8cba15b60 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Replay/RecordModeService.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Replay/RecordModeService.cs @@ -286,6 +286,10 @@ public sealed record RecordModeRequest( ReadOnlyMemory Vex, ReadOnlyMemory Log) { + public string? VexDigest { get; init; } + + public string? 
LogDigest { get; init; } + public string? PolicyDigest { get; init; } public string? FeedSnapshot { get; init; } diff --git a/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs b/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs index 4deede91e..1eff15869 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs @@ -4,6 +4,8 @@ internal static class ScannerPolicies { public const string ScansEnqueue = "scanner.api"; public const string ScansRead = "scanner.scans.read"; + public const string ScansWrite = "scanner.scans.write"; public const string Reports = "scanner.reports"; public const string RuntimeIngest = "scanner.runtime.ingest"; + public const string CallGraphIngest = "scanner.callgraph.ingest"; } diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/ConcelierHttpLinksetQueryService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ConcelierHttpLinksetQueryService.cs index 400c25306..1c0b3ead5 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Services/ConcelierHttpLinksetQueryService.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/ConcelierHttpLinksetQueryService.cs @@ -1,3 +1,4 @@ +using System.Collections.Immutable; using System.Net.Http.Json; using System.Text.Json; using System.Text.Json.Serialization; diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/DeltaScanRequestHandler.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/DeltaScanRequestHandler.cs index 801e817e9..007e80913 100644 --- a/src/Scanner/StellaOps.Scanner.WebService/Services/DeltaScanRequestHandler.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/DeltaScanRequestHandler.cs @@ -187,7 +187,7 @@ internal sealed class DeltaScanRequestHandler : IDeltaScanRequestHandler _logger.LogInformation( "Delta scan triggered for DRIFT event {EventId}: scanId={ScanId}, created={Created}", runtimeEvent.EventId, - 
result.Snapshot.Id, + result.Snapshot.ScanId, result.Created); } catch (Exception ex) diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/ICallGraphIngestionService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ICallGraphIngestionService.cs new file mode 100644 index 000000000..ae6119d2b --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/ICallGraphIngestionService.cs @@ -0,0 +1,52 @@ +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Result of call graph ingestion. +/// +public sealed record CallGraphIngestionResult( + string CallgraphId, + int NodeCount, + int EdgeCount, + string Digest); + +/// +/// Service for ingesting call graphs. +/// +public interface ICallGraphIngestionService +{ + /// + /// Finds an existing call graph by digest for idempotency checks. + /// + Task FindByDigestAsync( + ScanId scanId, + string contentDigest, + CancellationToken cancellationToken = default); + + /// + /// Ingests a call graph for a scan. + /// + Task IngestAsync( + ScanId scanId, + CallGraphV1Dto callGraph, + string contentDigest, + CancellationToken cancellationToken = default); + + /// + /// Validates a call graph before ingestion. + /// + CallGraphValidationResult Validate(CallGraphV1Dto callGraph); +} + +/// +/// Result of call graph validation. +/// +public sealed record CallGraphValidationResult( + bool IsValid, + IReadOnlyList? 
Errors = null) +{ + public static CallGraphValidationResult Success() => new(true); + public static CallGraphValidationResult Failure(params string[] errors) => new(false, errors); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/IExportServices.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IExportServices.cs new file mode 100644 index 000000000..cdac99114 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/IExportServices.cs @@ -0,0 +1,36 @@ +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Service for exporting findings as SARIF. +/// +public interface ISarifExportService +{ + /// + /// Exports scan findings as a SARIF document. + /// + Task ExportAsync(ScanId scanId, CancellationToken cancellationToken = default); +} + +/// +/// Service for exporting findings as CycloneDX with reachability extension. +/// +public interface ICycloneDxExportService +{ + /// + /// Exports scan findings as CycloneDX with reachability annotations. + /// + Task ExportWithReachabilityAsync(ScanId scanId, CancellationToken cancellationToken = default); +} + +/// +/// Service for exporting VEX decisions as OpenVEX. +/// +public interface IOpenVexExportService +{ + /// + /// Exports VEX decisions for the scan as OpenVEX format. + /// + Task ExportAsync(ScanId scanId, CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityComputeService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityComputeService.cs new file mode 100644 index 000000000..f3d5d6e7d --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityComputeService.cs @@ -0,0 +1,28 @@ +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Result of triggering reachability computation. 
+/// +public sealed record ComputeJobResult( + string JobId, + string Status, + bool AlreadyInProgress, + string? EstimatedDuration = null); + +/// +/// Service for triggering reachability computation. +/// +public interface IReachabilityComputeService +{ + /// + /// Triggers reachability computation for a scan. + /// + Task TriggerComputeAsync( + ScanId scanId, + bool forceRecompute, + IReadOnlyList? entrypoints, + IReadOnlyList? targets, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityExplainService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityExplainService.cs new file mode 100644 index 000000000..302fa13cb --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityExplainService.cs @@ -0,0 +1,72 @@ +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Explanation reason with code and impact. +/// +public sealed record ExplanationReason( + string Code, + string Description, + double? Impact = null); + +/// +/// Static analysis evidence. +/// +public sealed record StaticAnalysisEvidence( + string? CallgraphDigest = null, + int? PathLength = null, + IReadOnlyList? EdgeTypes = null); + +/// +/// Runtime evidence. +/// +public sealed record RuntimeEvidence( + bool Observed, + int HitCount = 0, + DateTimeOffset? LastObserved = null); + +/// +/// Policy evaluation result. +/// +public sealed record PolicyEvaluationEvidence( + string? PolicyDigest = null, + string? Verdict = null, + string? VerdictReason = null); + +/// +/// Evidence chain for explanation. +/// +public sealed record EvidenceChain( + StaticAnalysisEvidence? StaticAnalysis = null, + RuntimeEvidence? RuntimeEvidence = null, + PolicyEvaluationEvidence? PolicyEvaluation = null); + +/// +/// Full reachability explanation. 
+/// +public sealed record ReachabilityExplanation( + string CveId, + string Purl, + string Status, + double Confidence, + string? LatticeState = null, + IReadOnlyList? PathWitness = null, + IReadOnlyList? Why = null, + EvidenceChain? Evidence = null, + string? SpineId = null); + +/// +/// Service for explaining reachability decisions. +/// +public interface IReachabilityExplainService +{ + /// + /// Explains why a CVE affects a component. + /// + Task ExplainAsync( + ScanId scanId, + string cveId, + string purl, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityQueryService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityQueryService.cs new file mode 100644 index 000000000..d75ba3444 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/IReachabilityQueryService.cs @@ -0,0 +1,49 @@ +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Component reachability result. +/// +public sealed record ComponentReachability( + string Purl, + string Status, + double Confidence, + string? LatticeState = null, + IReadOnlyList? Why = null); + +/// +/// Reachability finding result. +/// +public sealed record ReachabilityFinding( + string CveId, + string Purl, + string Status, + double Confidence, + string? LatticeState = null, + string? Severity = null, + string? AffectedVersions = null); + +/// +/// Service for querying reachability results. +/// +public interface IReachabilityQueryService +{ + /// + /// Gets component reachability results for a scan. + /// + Task> GetComponentsAsync( + ScanId scanId, + string? purlFilter, + string? statusFilter, + CancellationToken cancellationToken = default); + + /// + /// Gets vulnerability findings with reachability for a scan. + /// + Task> GetFindingsAsync( + ScanId scanId, + string? cveFilter, + string? 
statusFilter, + CancellationToken cancellationToken = default); +} diff --git a/src/Scanner/StellaOps.Scanner.WebService/Services/ISbomIngestionService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ISbomIngestionService.cs new file mode 100644 index 000000000..376d50aef --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/ISbomIngestionService.cs @@ -0,0 +1,51 @@ +using System.Text.Json; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Domain; + +namespace StellaOps.Scanner.WebService.Services; + +/// +/// Result of SBOM ingestion. +/// +public sealed record SbomIngestionResult( + string SbomId, + string Format, + int ComponentCount, + string Digest); + +/// +/// Service for ingesting SBOMs (CycloneDX or SPDX). +/// +public interface ISbomIngestionService +{ + /// + /// Ingests an SBOM for a scan. + /// + Task IngestAsync( + ScanId scanId, + JsonDocument sbomDocument, + string format, + string? contentDigest, + CancellationToken cancellationToken = default); + + /// + /// Detects the SBOM format from the document. + /// + string? DetectFormat(JsonDocument sbomDocument); + + /// + /// Validates an SBOM document. + /// + SbomValidationResult Validate(JsonDocument sbomDocument, string format); +} + +/// +/// Result of SBOM validation. +/// +public sealed record SbomValidationResult( + bool IsValid, + IReadOnlyList? Errors = null) +{ + public static SbomValidationResult Success() => new(true); + public static SbomValidationResult Failure(params string[] errors) => new(false, errors); +} diff --git a/src/Signals/StellaOps.Signals/Options/UnknownsRescanOptions.cs b/src/Signals/StellaOps.Signals/Options/UnknownsRescanOptions.cs new file mode 100644 index 000000000..4dab12512 --- /dev/null +++ b/src/Signals/StellaOps.Signals/Options/UnknownsRescanOptions.cs @@ -0,0 +1,44 @@ +namespace StellaOps.Signals.Options; + +/// +/// Configuration for the unknowns rescan background worker. 
+/// +public sealed class UnknownsRescanOptions +{ + public const string SectionName = "Signals:UnknownsRescan"; + + /// + /// Whether the rescan worker is enabled. Default: true + /// + public bool Enabled { get; set; } = true; + + /// + /// Poll interval for checking due items. Default: 60 seconds + /// + public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(60); + + /// + /// Maximum HOT items to process per poll cycle. Default: 50 + /// + public int HotBatchSize { get; set; } = 50; + + /// + /// Maximum WARM items to process per poll cycle. Default: 100 + /// + public int WarmBatchSize { get; set; } = 100; + + /// + /// Maximum COLD items to process in weekly batch. Default: 500 + /// + public int ColdBatchSize { get; set; } = 500; + + /// + /// Day of week for COLD batch processing. Default: Sunday + /// + public DayOfWeek ColdBatchDay { get; set; } = DayOfWeek.Sunday; + + /// + /// Hour (UTC) for COLD batch processing. Default: 3 (3 AM UTC) + /// + public int ColdBatchHourUtc { get; set; } = 3; +} diff --git a/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs b/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs index 7a275afec..9a923957c 100644 --- a/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs +++ b/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs @@ -251,6 +251,102 @@ public sealed class SimpleJsonCallgraphParser : ICallgraphParser return true; } + private static bool TryParseFlatGraph(JsonElement root, out CallgraphParseResult result) + { + result = default!; + + // Flat graph format: array of edges only, nodes derived from edge endpoints + if (root.ValueKind != JsonValueKind.Array) + { + return false; + } + + var edges = new List(); + var uniqueNodeIds = new HashSet(StringComparer.Ordinal); + + foreach (var edgeElement in root.EnumerateArray()) + { + var source = GetString(edgeElement, "source", "from"); + var target = GetString(edgeElement, "target", "to"); + if 
(string.IsNullOrWhiteSpace(source) || string.IsNullOrWhiteSpace(target)) + { + continue; + } + + uniqueNodeIds.Add(source.Trim()); + uniqueNodeIds.Add(target.Trim()); + + edges.Add(new CallgraphEdge + { + SourceId = source.Trim(), + TargetId = target.Trim(), + Type = GetString(edgeElement, "type", "kind") ?? "call", + Purl = GetString(edgeElement, "purl"), + SymbolDigest = GetString(edgeElement, "symbol_digest", "symbolDigest"), + Candidates = GetStringArray(edgeElement, "candidates"), + Confidence = GetNullableDouble(edgeElement, "confidence"), + Evidence = GetStringArray(edgeElement, "evidence") + }); + } + + if (edges.Count == 0) + { + return false; + } + + var nodes = new List(); + foreach (var nodeId in uniqueNodeIds) + { + nodes.Add(new CallgraphNode { Id = nodeId, Name = nodeId, Kind = "function" }); + } + + result = new CallgraphParseResult( + nodes, + edges, + Array.Empty(), + "1.0", + "1.0", + null); + return true; + } + + private static IReadOnlyList ParseEntrypoints(JsonElement root) + { + if (!root.TryGetProperty("entrypoints", out var entrypointsEl) || entrypointsEl.ValueKind != JsonValueKind.Array) + { + return Array.Empty(); + } + + var entrypoints = new List(entrypointsEl.GetArrayLength()); + var order = 0; + foreach (var ep in entrypointsEl.EnumerateArray()) + { + var nodeId = GetString(ep, "nodeId", "node_id"); + if (string.IsNullOrWhiteSpace(nodeId)) + { + continue; + } + + var kindStr = GetString(ep, "kind") ?? "unknown"; + var phaseStr = GetString(ep, "phase") ?? "runtime"; + var frameworkStr = GetString(ep, "framework") ?? "unknown"; + + entrypoints.Add(new CallgraphEntrypoint + { + NodeId = nodeId.Trim(), + Kind = Enum.TryParse(kindStr, true, out var kind) ? kind : EntrypointKind.Unknown, + Phase = Enum.TryParse(phaseStr, true, out var phase) ? phase : EntrypointPhase.Runtime, + Framework = Enum.TryParse(frameworkStr, true, out var framework) ? 
framework : EntrypointFramework.Unknown, + Route = GetString(ep, "route"), + HttpMethod = GetString(ep, "httpMethod", "http_method"), + Source = GetString(ep, "source"), + Order = order++ + }); + } + + return entrypoints; + } + private static IReadOnlyList ParseRoots(JsonElement root) { if (!root.TryGetProperty("roots", out var rootsEl) || rootsEl.ValueKind != JsonValueKind.Array) diff --git a/src/Signals/StellaOps.Signals/Persistence/IUnknownsRepository.cs b/src/Signals/StellaOps.Signals/Persistence/IUnknownsRepository.cs index 79b94087e..d47eab521 100644 --- a/src/Signals/StellaOps.Signals/Persistence/IUnknownsRepository.cs +++ b/src/Signals/StellaOps.Signals/Persistence/IUnknownsRepository.cs @@ -28,4 +28,18 @@ public interface IUnknownsRepository UnknownsBand band, int limit, CancellationToken cancellationToken); + + /// + /// Queries unknowns with optional band filter and pagination. + /// + Task> QueryAsync( + UnknownsBand? band, + int limit, + int offset, + CancellationToken cancellationToken); + + /// + /// Gets a single unknown by its ID. + /// + Task GetByIdAsync(string id, CancellationToken cancellationToken); } diff --git a/src/Signals/StellaOps.Signals/Persistence/InMemoryUnknownsRepository.cs b/src/Signals/StellaOps.Signals/Persistence/InMemoryUnknownsRepository.cs index c61bb272c..300b420d1 100644 --- a/src/Signals/StellaOps.Signals/Persistence/InMemoryUnknownsRepository.cs +++ b/src/Signals/StellaOps.Signals/Persistence/InMemoryUnknownsRepository.cs @@ -94,6 +94,44 @@ public sealed class InMemoryUnknownsRepository : IUnknownsRepository return Task.FromResult>(results); } + public Task> QueryAsync( + UnknownsBand? 
band, + int limit, + int offset, + CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var query = _store.Values.SelectMany(x => x); + + if (band.HasValue) + { + query = query.Where(u => u.Band == band.Value); + } + + var results = query + .OrderByDescending(u => u.Score) + .ThenBy(u => u.Id, StringComparer.OrdinalIgnoreCase) + .Skip(offset) + .Take(limit) + .Select(Clone) + .ToList(); + + return Task.FromResult>(results); + } + + public Task GetByIdAsync(string id, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(id); + cancellationToken.ThrowIfCancellationRequested(); + + var item = _store.Values + .SelectMany(x => x) + .FirstOrDefault(u => string.Equals(u.Id, id, StringComparison.OrdinalIgnoreCase)); + + return Task.FromResult(item is not null ? Clone(item) : null); + } + private static UnknownSymbolDocument Clone(UnknownSymbolDocument source) => new() { Id = source.Id, diff --git a/src/Signals/StellaOps.Signals/Program.cs b/src/Signals/StellaOps.Signals/Program.cs index 44fe05f0c..00e3fb798 100644 --- a/src/Signals/StellaOps.Signals/Program.cs +++ b/src/Signals/StellaOps.Signals/Program.cs @@ -743,6 +743,91 @@ signalsGroup.MapGet("/unknowns/{subjectKey}", async Task ( return items.Count == 0 ? Results.NotFound() : Results.Ok(items); }).WithName("SignalsUnknownsGet"); +signalsGroup.MapGet("/unknowns", async Task ( + HttpContext context, + SignalsOptions options, + IUnknownsRepository repository, + SignalsSealedModeMonitor sealedModeMonitor, + [FromQuery] string? band, + [FromQuery] int limit = 100, + [FromQuery] int offset = 0, + CancellationToken cancellationToken = default) => +{ + if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure)) + { + return authFailure ?? Results.Unauthorized(); + } + + if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure)) + { + return sealedFailure ?? 
Results.StatusCode(StatusCodes.Status503ServiceUnavailable); + } + + limit = Math.Clamp(limit, 1, 1000); + offset = Math.Max(0, offset); + + UnknownsBand? bandFilter = null; + if (!string.IsNullOrWhiteSpace(band) && Enum.TryParse(band, ignoreCase: true, out var parsedBand)) + { + bandFilter = parsedBand; + } + + var items = await repository.QueryAsync(bandFilter, limit, offset, cancellationToken).ConfigureAwait(false); + return Results.Ok(new + { + items, + count = items.Count, + limit, + offset, + band = bandFilter?.ToString().ToLowerInvariant() + }); +}).WithName("SignalsUnknownsQuery"); + +signalsGroup.MapGet("/unknowns/{id}/explain", async Task ( + HttpContext context, + SignalsOptions options, + string id, + IUnknownsRepository repository, + IUnknownsScoringService scoringService, + SignalsSealedModeMonitor sealedModeMonitor, + CancellationToken cancellationToken) => +{ + if (!Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var authFailure)) + { + return authFailure ?? Results.Unauthorized(); + } + + if (!Program.TryEnsureSealedMode(sealedModeMonitor, out var sealedFailure)) + { + return sealedFailure ?? Results.StatusCode(StatusCodes.Status503ServiceUnavailable); + } + + if (string.IsNullOrWhiteSpace(id)) + { + return Results.BadRequest(new { error = "id is required." }); + } + + var unknown = await repository.GetByIdAsync(id.Trim(), cancellationToken).ConfigureAwait(false); + if (unknown is null) + { + return Results.NotFound(new { error = $"Unknown with id '{id}' not found." 
}); + } + + return Results.Ok(new + { + id = unknown.Id, + subjectKey = unknown.SubjectKey, + band = unknown.Band.ToString().ToLowerInvariant(), + score = unknown.Score, + normalizationTrace = unknown.NormalizationTrace, + flags = unknown.Flags, + nextScheduledRescan = unknown.NextScheduledRescan, + rescanAttempts = unknown.RescanAttempts, + createdAt = unknown.CreatedAt, + updatedAt = unknown.UpdatedAt + }); +}).WithName("SignalsUnknownsExplain"); + signalsGroup.MapPost("/reachability/recompute", async Task ( HttpContext context, SignalsOptions options, diff --git a/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs b/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs index 0012bde30..04633aa09 100644 --- a/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs +++ b/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs @@ -330,20 +330,6 @@ internal sealed class CallgraphIngestionService : ICallgraphIngestionService return ordered.ToString(); } - private static string JoinDict(IReadOnlyDictionary? values) - { - if (values is null) - { - return string.Empty; - } - - var ordered = new StringBuilder(); - foreach (var kv in values.OrderBy(k => k.Key, StringComparer.Ordinal)) - { - ordered.Append(kv.Key).Append('=').Append(kv.Value).Append(';'); - } - return ordered.ToString(); - } } /// diff --git a/src/Signals/StellaOps.Signals/Services/IRescanOrchestrator.cs b/src/Signals/StellaOps.Signals/Services/IRescanOrchestrator.cs new file mode 100644 index 000000000..92f465313 --- /dev/null +++ b/src/Signals/StellaOps.Signals/Services/IRescanOrchestrator.cs @@ -0,0 +1,64 @@ +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Services; + +/// +/// Orchestrates rescan operations for unknowns. +/// +public interface IRescanOrchestrator +{ + /// + /// Triggers a rescan for a single unknown item. 
+ /// + Task TriggerRescanAsync( + UnknownSymbolDocument unknown, + RescanPriority priority, + CancellationToken cancellationToken = default); + + /// + /// Triggers a batch rescan for multiple unknown items. + /// + Task TriggerBatchRescanAsync( + IReadOnlyList unknowns, + RescanPriority priority, + CancellationToken cancellationToken = default); +} + +/// +/// Priority level for rescan operations. +/// +public enum RescanPriority +{ + /// + /// Immediate processing for HOT items. + /// + Immediate, + + /// + /// Scheduled processing for WARM items. + /// + Scheduled, + + /// + /// Batch processing for COLD items. + /// + Batch +} + +/// +/// Result of a single rescan operation. +/// +public sealed record RescanResult( + string UnknownId, + bool Success, + string? ErrorMessage = null, + DateTimeOffset? NextScheduledRescan = null); + +/// +/// Result of a batch rescan operation. +/// +public sealed record BatchRescanResult( + int TotalRequested, + int SuccessCount, + int FailureCount, + IReadOnlyList Results); diff --git a/src/Signals/StellaOps.Signals/Services/LoggingRescanOrchestrator.cs b/src/Signals/StellaOps.Signals/Services/LoggingRescanOrchestrator.cs new file mode 100644 index 000000000..f20be4f06 --- /dev/null +++ b/src/Signals/StellaOps.Signals/Services/LoggingRescanOrchestrator.cs @@ -0,0 +1,85 @@ +using Microsoft.Extensions.Logging; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Services; + +/// +/// Logging-only implementation of . +/// Placeholder until actual rescan integration is implemented. +/// +public sealed class LoggingRescanOrchestrator : IRescanOrchestrator +{ + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public LoggingRescanOrchestrator( + TimeProvider timeProvider, + ILogger logger) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public Task TriggerRescanAsync( + UnknownSymbolDocument unknown, + RescanPriority priority, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(unknown); + + _logger.LogInformation( + "Rescan triggered for unknown {UnknownId} with priority {Priority} (Band={Band}, Score={Score:F2})", + unknown.Id, + priority, + unknown.Band, + unknown.Score); + + // Calculate next rescan time based on priority + var nextRescan = priority switch + { + RescanPriority.Immediate => _timeProvider.GetUtcNow().AddMinutes(15), + RescanPriority.Scheduled => _timeProvider.GetUtcNow().AddHours(24), + _ => _timeProvider.GetUtcNow().AddDays(7) + }; + + return Task.FromResult(new RescanResult( + unknown.Id, + Success: true, + NextScheduledRescan: nextRescan)); + } + + public Task TriggerBatchRescanAsync( + IReadOnlyList unknowns, + RescanPriority priority, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(unknowns); + + _logger.LogInformation( + "Batch rescan triggered for {Count} unknowns with priority {Priority}", + unknowns.Count, + priority); + + var results = new List(unknowns.Count); + var nextRescan = priority switch + { + RescanPriority.Immediate => _timeProvider.GetUtcNow().AddMinutes(15), + RescanPriority.Scheduled => _timeProvider.GetUtcNow().AddHours(24), + _ => _timeProvider.GetUtcNow().AddDays(7) + }; + + foreach (var unknown in unknowns) + { + results.Add(new RescanResult( + unknown.Id, + Success: true, + NextScheduledRescan: nextRescan)); + } + + return Task.FromResult(new BatchRescanResult( + TotalRequested: unknowns.Count, + SuccessCount: unknowns.Count, + FailureCount: 0, + Results: results)); + } +} diff --git a/src/Signals/StellaOps.Signals/Services/UnknownsRescanMetrics.cs b/src/Signals/StellaOps.Signals/Services/UnknownsRescanMetrics.cs new file mode 100644 index 000000000..a8f3da10f --- /dev/null +++ 
b/src/Signals/StellaOps.Signals/Services/UnknownsRescanMetrics.cs @@ -0,0 +1,107 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Signals.Services; + +/// +/// Metrics for unknowns rescan operations and band distribution. +/// +internal static class UnknownsRescanMetrics +{ + private static readonly Meter Meter = new("StellaOps.Signals.Rescan", "1.0.0"); + + // ===== RESCAN COUNTERS ===== + + public static readonly Counter RescansTriggered = Meter.CreateCounter( + "stellaops_unknowns_rescans_triggered_total", + description: "Total rescans triggered by band"); + + public static readonly Counter RescansSucceeded = Meter.CreateCounter( + "stellaops_unknowns_rescans_succeeded_total", + description: "Total successful rescans by band"); + + public static readonly Counter RescansFailed = Meter.CreateCounter( + "stellaops_unknowns_rescans_failed_total", + description: "Total failed rescans by band"); + + // ===== BATCH COUNTERS ===== + + public static readonly Counter HotBatchesProcessed = Meter.CreateCounter( + "stellaops_unknowns_hot_batches_processed_total", + description: "Total HOT band batch processing cycles"); + + public static readonly Counter WarmBatchesProcessed = Meter.CreateCounter( + "stellaops_unknowns_warm_batches_processed_total", + description: "Total WARM band batch processing cycles"); + + public static readonly Counter ColdBatchesProcessed = Meter.CreateCounter( + "stellaops_unknowns_cold_batches_processed_total", + description: "Total COLD band weekly batch runs"); + + // ===== TIMING HISTOGRAMS ===== + + public static readonly Histogram RescanDurationSeconds = Meter.CreateHistogram( + "stellaops_unknowns_rescan_duration_seconds", + unit: "s", + description: "Duration of individual rescan operations"); + + public static readonly Histogram BatchDurationSeconds = Meter.CreateHistogram( + "stellaops_unknowns_rescan_batch_duration_seconds", + unit: "s", + description: "Duration of rescan batch cycles"); + + // ===== BAND DISTRIBUTION ===== + + 
public static readonly ObservableGauge HotCount = Meter.CreateObservableGauge( + "stellaops_unknowns_band_hot_count", + () => _hotCount, + description: "Current count of HOT band unknowns"); + + public static readonly ObservableGauge WarmCount = Meter.CreateObservableGauge( + "stellaops_unknowns_band_warm_count", + () => _warmCount, + description: "Current count of WARM band unknowns"); + + public static readonly ObservableGauge ColdCount = Meter.CreateObservableGauge( + "stellaops_unknowns_band_cold_count", + () => _coldCount, + description: "Current count of COLD band unknowns"); + + // Band distribution state (updated by scoring service) + private static int _hotCount; + private static int _warmCount; + private static int _coldCount; + + /// + /// Updates the band distribution gauges. + /// + public static void SetBandDistribution(int hot, int warm, int cold) + { + Interlocked.Exchange(ref _hotCount, hot); + Interlocked.Exchange(ref _warmCount, warm); + Interlocked.Exchange(ref _coldCount, cold); + } + + /// + /// Records a rescan trigger with band tag. + /// + public static void RecordRescanTriggered(string band) + { + RescansTriggered.Add(1, new KeyValuePair("band", band)); + } + + /// + /// Records a successful rescan with band tag. + /// + public static void RecordRescanSuccess(string band) + { + RescansSucceeded.Add(1, new KeyValuePair("band", band)); + } + + /// + /// Records a failed rescan with band tag. 
+ /// + public static void RecordRescanFailure(string band) + { + RescansFailed.Add(1, new KeyValuePair("band", band)); + } +} diff --git a/src/Signals/StellaOps.Signals/Services/UnknownsRescanWorker.cs b/src/Signals/StellaOps.Signals/Services/UnknownsRescanWorker.cs new file mode 100644 index 000000000..33e56d5c6 --- /dev/null +++ b/src/Signals/StellaOps.Signals/Services/UnknownsRescanWorker.cs @@ -0,0 +1,263 @@ +using System.Diagnostics; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Signals.Models; +using StellaOps.Signals.Options; +using StellaOps.Signals.Persistence; + +namespace StellaOps.Signals.Services; + +/// +/// Background worker that processes unknowns rescans based on band scheduling. +/// HOT items are processed immediately, WARM items on schedule, COLD items in weekly batches. +/// +public sealed class UnknownsRescanWorker : BackgroundService +{ + private readonly IUnknownsRepository _repository; + private readonly IRescanOrchestrator _orchestrator; + private readonly IOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger _logger; + + public UnknownsRescanWorker( + IUnknownsRepository repository, + IRescanOrchestrator orchestrator, + IOptions options, + TimeProvider timeProvider, + ILogger logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _orchestrator = orchestrator ?? throw new ArgumentNullException(nameof(orchestrator)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var opts = _options.Value; + + if (!opts.Enabled) + { + _logger.LogInformation("Unknowns rescan worker is disabled."); + return; + } + + _logger.LogInformation( + "Unknowns rescan worker started. Poll interval: {PollInterval}", + opts.PollInterval); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + await ProcessHotBandAsync(opts, stoppingToken).ConfigureAwait(false); + await ProcessWarmBandAsync(opts, stoppingToken).ConfigureAwait(false); + await ProcessColdBandAsync(opts, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error in unknowns rescan worker cycle."); + } + + try + { + await Task.Delay(opts.PollInterval, _timeProvider, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + } + + _logger.LogInformation("Unknowns rescan worker stopping."); + } + + private async Task ProcessHotBandAsync(UnknownsRescanOptions opts, CancellationToken cancellationToken) + { + var sw = Stopwatch.StartNew(); + var hotItems = await _repository.GetDueForRescanAsync( + UnknownsBand.Hot, + opts.HotBatchSize, + cancellationToken).ConfigureAwait(false); + + if (hotItems.Count == 0) + { + return; + } + + _logger.LogInformation( + "Processing {Count} HOT unknowns for immediate rescan.", + hotItems.Count); + + foreach (var item in hotItems) + { + UnknownsRescanMetrics.RecordRescanTriggered("hot"); + try + { + var result = await _orchestrator.TriggerRescanAsync( + item, + RescanPriority.Immediate, + cancellationToken).ConfigureAwait(false); + + if (result.Success) + { + UnknownsRescanMetrics.RecordRescanSuccess("hot"); + _logger.LogDebug( + "HOT unknown {UnknownId} rescan triggered successfully.", + item.Id); 
+ } + else + { + UnknownsRescanMetrics.RecordRescanFailure("hot"); + _logger.LogWarning( + "HOT unknown {UnknownId} rescan failed: {Error}", + item.Id, + result.ErrorMessage); + } + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + UnknownsRescanMetrics.RecordRescanFailure("hot"); + _logger.LogError(ex, "Failed to trigger rescan for HOT unknown {UnknownId}.", item.Id); + } + } + + sw.Stop(); + UnknownsRescanMetrics.HotBatchesProcessed.Add(1); + UnknownsRescanMetrics.BatchDurationSeconds.Record(sw.Elapsed.TotalSeconds, new KeyValuePair("band", "hot")); + } + + private async Task ProcessWarmBandAsync(UnknownsRescanOptions opts, CancellationToken cancellationToken) + { + var sw = Stopwatch.StartNew(); + var warmItems = await _repository.GetDueForRescanAsync( + UnknownsBand.Warm, + opts.WarmBatchSize, + cancellationToken).ConfigureAwait(false); + + if (warmItems.Count == 0) + { + return; + } + + _logger.LogInformation( + "Processing {Count} WARM unknowns for scheduled rescan.", + warmItems.Count); + + foreach (var item in warmItems) + { + UnknownsRescanMetrics.RecordRescanTriggered("warm"); + try + { + var result = await _orchestrator.TriggerRescanAsync( + item, + RescanPriority.Scheduled, + cancellationToken).ConfigureAwait(false); + + if (result.Success) + { + UnknownsRescanMetrics.RecordRescanSuccess("warm"); + _logger.LogDebug( + "WARM unknown {UnknownId} rescan scheduled.", + item.Id); + } + else + { + UnknownsRescanMetrics.RecordRescanFailure("warm"); + _logger.LogWarning( + "WARM unknown {UnknownId} rescan scheduling failed: {Error}", + item.Id, + result.ErrorMessage); + } + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + UnknownsRescanMetrics.RecordRescanFailure("warm"); + _logger.LogError(ex, "Failed to schedule rescan for WARM unknown {UnknownId}.", item.Id); + } + } + + sw.Stop(); + UnknownsRescanMetrics.WarmBatchesProcessed.Add(1); + 
UnknownsRescanMetrics.BatchDurationSeconds.Record(sw.Elapsed.TotalSeconds, new KeyValuePair("band", "warm")); + } + + private async Task ProcessColdBandAsync(UnknownsRescanOptions opts, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + + // COLD items are processed in weekly batches on the configured day and hour + if (now.DayOfWeek != opts.ColdBatchDay) + { + return; + } + + // Only process once per day during the configured hour + if (now.Hour != opts.ColdBatchHourUtc) + { + return; + } + + var sw = Stopwatch.StartNew(); + var coldItems = await _repository.GetDueForRescanAsync( + UnknownsBand.Cold, + opts.ColdBatchSize, + cancellationToken).ConfigureAwait(false); + + if (coldItems.Count == 0) + { + _logger.LogDebug("No COLD unknowns due for weekly batch processing."); + return; + } + + _logger.LogInformation( + "Processing weekly COLD batch: {Count} unknowns.", + coldItems.Count); + + try + { + foreach (var item in coldItems) + { + UnknownsRescanMetrics.RecordRescanTriggered("cold"); + } + + var result = await _orchestrator.TriggerBatchRescanAsync( + coldItems, + RescanPriority.Batch, + cancellationToken).ConfigureAwait(false); + + // Record success/failure metrics + for (var i = 0; i < result.SuccessCount; i++) + { + UnknownsRescanMetrics.RecordRescanSuccess("cold"); + } + for (var i = 0; i < result.FailureCount; i++) + { + UnknownsRescanMetrics.RecordRescanFailure("cold"); + } + + _logger.LogInformation( + "COLD batch completed: {Success}/{Total} succeeded.", + result.SuccessCount, + result.TotalRequested); + } + catch (Exception ex) when (ex is not OperationCanceledException) + { + foreach (var item in coldItems) + { + UnknownsRescanMetrics.RecordRescanFailure("cold"); + } + _logger.LogError(ex, "Failed to process COLD batch rescan."); + } + + sw.Stop(); + UnknownsRescanMetrics.ColdBatchesProcessed.Add(1); + UnknownsRescanMetrics.BatchDurationSeconds.Record(sw.Elapsed.TotalSeconds, new KeyValuePair("band", "cold")); + } +} diff 
--git a/src/Signals/__Tests/StellaOps.Signals.Tests/ReachabilityScoringServiceTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/ReachabilityScoringServiceTests.cs index 9601992b7..899d3e677 100644 --- a/src/Signals/__Tests/StellaOps.Signals.Tests/ReachabilityScoringServiceTests.cs +++ b/src/Signals/__Tests/StellaOps.Signals.Tests/ReachabilityScoringServiceTests.cs @@ -264,5 +264,30 @@ public class ReachabilityScoringServiceTests { return Task.FromResult(Stored.Count); } + + public Task BulkUpdateAsync(IEnumerable items, CancellationToken cancellationToken) + { + foreach (var item in items) + { + var existing = Stored.FindIndex(x => x.Id == item.Id); + if (existing >= 0) + Stored[existing] = item; + else + Stored.Add(item); + } + return Task.CompletedTask; + } + + public Task> GetAllSubjectKeysAsync(CancellationToken cancellationToken) + { + return Task.FromResult>( + Stored.Select(x => x.SubjectKey).Distinct().ToList()); + } + + public Task> GetDueForRescanAsync(UnknownsBand band, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>( + Stored.Where(x => x.Band == band).Take(limit).ToList()); + } } } diff --git a/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsDecayServiceTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsDecayServiceTests.cs new file mode 100644 index 000000000..cef256df2 --- /dev/null +++ b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsDecayServiceTests.cs @@ -0,0 +1,514 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using MsOptions = Microsoft.Extensions.Options; +using StellaOps.Signals.Models; +using StellaOps.Signals.Options; +using StellaOps.Signals.Persistence; +using StellaOps.Signals.Services; +using Xunit; + +namespace StellaOps.Signals.Tests; + +public class UnknownsDecayServiceTests +{ + private readonly MockTimeProvider _timeProvider; + private readonly 
InMemoryUnknownsRepository _unknownsRepo; + private readonly InMemoryDeploymentRefsRepository _deploymentRefs; + private readonly InMemoryGraphMetricsRepository _graphMetrics; + private readonly UnknownsScoringOptions _scoringOptions; + private readonly UnknownsDecayOptions _decayOptions; + + public UnknownsDecayServiceTests() + { + _timeProvider = new MockTimeProvider(new DateTimeOffset(2025, 12, 15, 12, 0, 0, TimeSpan.Zero)); + _unknownsRepo = new InMemoryUnknownsRepository(); + _deploymentRefs = new InMemoryDeploymentRefsRepository(); + _graphMetrics = new InMemoryGraphMetricsRepository(); + _scoringOptions = new UnknownsScoringOptions(); + _decayOptions = new UnknownsDecayOptions(); + } + + private (UnknownsDecayService DecayService, UnknownsScoringService ScoringService) CreateServices() + { + var scoringService = new UnknownsScoringService( + _unknownsRepo, + _deploymentRefs, + _graphMetrics, + MsOptions.Options.Create(_scoringOptions), + _timeProvider, + NullLogger.Instance); + + var decayService = new UnknownsDecayService( + _unknownsRepo, + scoringService, + MsOptions.Options.Create(_scoringOptions), + MsOptions.Options.Create(_decayOptions), + _timeProvider, + NullLogger.Instance); + + return (decayService, scoringService); + } + + #region ApplyDecayAsync Tests + + [Fact] + public async Task ApplyDecayAsync_EmptySubject_ReturnsZeroCounts() + { + var (decayService, _) = CreateServices(); + + var result = await decayService.ApplyDecayAsync("empty|1.0.0", CancellationToken.None); + + Assert.Equal("empty|1.0.0", result.SubjectKey); + Assert.Equal(0, result.ProcessedCount); + Assert.Equal(0, result.HotCount); + Assert.Equal(0, result.WarmCount); + Assert.Equal(0, result.ColdCount); + Assert.Equal(0, result.BandChanges); + } + + [Fact] + public async Task ApplyDecayAsync_SingleUnknown_UpdatesAndPersists() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + const string subjectKey = "test|1.0.0"; + + var unknown = new 
UnknownSymbolDocument + { + Id = "unknown-1", + SubjectKey = subjectKey, + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-10), + Band = UnknownsBand.Cold + }; + + await _unknownsRepo.UpsertAsync(subjectKey, new[] { unknown }, CancellationToken.None); + + var result = await decayService.ApplyDecayAsync(subjectKey, CancellationToken.None); + + Assert.Equal(1, result.ProcessedCount); + Assert.Equal(subjectKey, result.SubjectKey); + + // Verify the unknown was updated in the repository + var updated = await _unknownsRepo.GetBySubjectAsync(subjectKey, CancellationToken.None); + Assert.Single(updated); + Assert.True(updated[0].UpdatedAt >= now); + } + + [Fact] + public async Task ApplyDecayAsync_BandChangesTracked() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + const string subjectKey = "test|1.0.0"; + + // Create unknown that will change from COLD to HOT due to high staleness and flags + var unknown = new UnknownSymbolDocument + { + Id = "unknown-1", + SubjectKey = subjectKey, + LastAnalyzedAt = now.AddDays(-14), + Flags = new UnknownFlags + { + NoProvenanceAnchor = true, + VersionRange = true, + ConflictingFeeds = true, + MissingVector = true + }, + CreatedAt = now.AddDays(-20), + Band = UnknownsBand.Cold // Initially cold + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 100); + await _unknownsRepo.UpsertAsync(subjectKey, new[] { unknown }, CancellationToken.None); + + var result = await decayService.ApplyDecayAsync(subjectKey, CancellationToken.None); + + // Band should have changed from COLD to HOT + if (result.HotCount > 0) + { + Assert.Equal(1, result.BandChanges); + } + } + + [Fact] + public async Task ApplyDecayAsync_MultipleUnknowns_ProcessesAll() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + const string subjectKey = "test|1.0.0"; + + var unknowns = new[] + { + new UnknownSymbolDocument + { + Id = "unknown-1", + 
SubjectKey = subjectKey, + LastAnalyzedAt = now, + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-1), + Band = UnknownsBand.Cold + }, + new UnknownSymbolDocument + { + Id = "unknown-2", + SubjectKey = subjectKey, + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags { NoProvenanceAnchor = true }, + CreatedAt = now.AddDays(-10), + Band = UnknownsBand.Warm + }, + new UnknownSymbolDocument + { + Id = "unknown-3", + SubjectKey = subjectKey, + LastAnalyzedAt = now.AddDays(-14), + Flags = new UnknownFlags { NoProvenanceAnchor = true, VersionRange = true }, + CreatedAt = now.AddDays(-20), + Band = UnknownsBand.Hot + } + }; + + await _unknownsRepo.UpsertAsync(subjectKey, unknowns, CancellationToken.None); + + var result = await decayService.ApplyDecayAsync(subjectKey, CancellationToken.None); + + Assert.Equal(3, result.ProcessedCount); + Assert.Equal(result.HotCount + result.WarmCount + result.ColdCount, result.ProcessedCount); + } + + #endregion + + #region RunNightlyDecayBatchAsync Tests + + [Fact] + public async Task RunNightlyDecayBatchAsync_ProcessesAllSubjects() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + + // Create unknowns in multiple subjects + await _unknownsRepo.UpsertAsync("subject-1|1.0.0", new[] + { + new UnknownSymbolDocument + { + Id = "u1", + SubjectKey = "subject-1|1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-10) + } + }, CancellationToken.None); + + await _unknownsRepo.UpsertAsync("subject-2|1.0.0", new[] + { + new UnknownSymbolDocument + { + Id = "u2", + SubjectKey = "subject-2|1.0.0", + LastAnalyzedAt = now.AddDays(-3), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-5) + } + }, CancellationToken.None); + + var result = await decayService.RunNightlyDecayBatchAsync(CancellationToken.None); + + Assert.Equal(2, result.TotalSubjects); + Assert.Equal(2, result.TotalUnknowns); + Assert.True(result.Duration >= TimeSpan.Zero); + } + + 
[Fact] + public async Task RunNightlyDecayBatchAsync_RespectsMaxSubjectsLimit() + { + var decayOptions = new UnknownsDecayOptions { MaxSubjectsPerBatch = 1 }; + var scoringService = new UnknownsScoringService( + _unknownsRepo, + _deploymentRefs, + _graphMetrics, + MsOptions.Options.Create(_scoringOptions), + _timeProvider, + NullLogger.Instance); + + var decayService = new UnknownsDecayService( + _unknownsRepo, + scoringService, + MsOptions.Options.Create(_scoringOptions), + MsOptions.Options.Create(decayOptions), + _timeProvider, + NullLogger.Instance); + + var now = _timeProvider.GetUtcNow(); + + // Create unknowns in multiple subjects + await _unknownsRepo.UpsertAsync("subject-1|1.0.0", new[] + { + new UnknownSymbolDocument + { + Id = "u1", + SubjectKey = "subject-1|1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-10) + } + }, CancellationToken.None); + + await _unknownsRepo.UpsertAsync("subject-2|1.0.0", new[] + { + new UnknownSymbolDocument + { + Id = "u2", + SubjectKey = "subject-2|1.0.0", + LastAnalyzedAt = now.AddDays(-3), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-5) + } + }, CancellationToken.None); + + var result = await decayService.RunNightlyDecayBatchAsync(CancellationToken.None); + + // Should only process 1 subject due to limit + Assert.Equal(1, result.TotalSubjects); + Assert.Equal(1, result.TotalUnknowns); + } + + [Fact] + public async Task RunNightlyDecayBatchAsync_CancellationRespected() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + + // Create unknowns in multiple subjects + for (int i = 0; i < 10; i++) + { + await _unknownsRepo.UpsertAsync($"subject-{i}|1.0.0", new[] + { + new UnknownSymbolDocument + { + Id = $"u{i}", + SubjectKey = $"subject-{i}|1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-10) + } + }, CancellationToken.None); + } + + using var cts = new CancellationTokenSource(); 
+ cts.Cancel(); + + await Assert.ThrowsAsync(() => + decayService.RunNightlyDecayBatchAsync(cts.Token)); + } + + #endregion + + #region ApplyDecayToUnknownAsync Tests + + [Fact] + public async Task ApplyDecayToUnknownAsync_UpdatesScoringFields() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "unknown-1", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags { NoProvenanceAnchor = true }, + CreatedAt = now.AddDays(-10), + Score = 0, + Band = UnknownsBand.Cold + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 50); + + var result = await decayService.ApplyDecayToUnknownAsync(unknown, CancellationToken.None); + + // Verify scoring fields were updated + Assert.True(result.Score > 0); + Assert.True(result.PopularityScore > 0); + Assert.True(result.StalenessScore > 0); + Assert.True(result.UncertaintyScore > 0); + Assert.NotNull(result.NextScheduledRescan); + Assert.NotNull(result.NormalizationTrace); + } + + [Fact] + public async Task ApplyDecayToUnknownAsync_SetsNextRescanBasedOnBand() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + + // Create unknown that will be scored as COLD + var coldUnknown = new UnknownSymbolDocument + { + Id = "cold-unknown", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now, // Fresh + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-1) + }; + + var result = await decayService.ApplyDecayToUnknownAsync(coldUnknown, CancellationToken.None); + + Assert.Equal(UnknownsBand.Cold, result.Band); + Assert.Equal(now.AddDays(_scoringOptions.ColdRescanDays), result.NextScheduledRescan); + } + + #endregion + + #region Decay Result Aggregation Tests + + [Fact] + public async Task ApplyDecayAsync_ResultCountsAreAccurate() + { + var (decayService, _) = CreateServices(); + var now = _timeProvider.GetUtcNow(); + const string subjectKey 
= "test|1.0.0"; + + // Create unknowns that will end up in different bands + var unknowns = new List(); + + // This will be COLD (fresh, no flags) + unknowns.Add(new UnknownSymbolDocument + { + Id = "cold-1", + SubjectKey = subjectKey, + LastAnalyzedAt = now, + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-1) + }); + + // Add more with varying staleness and flags + for (int i = 0; i < 5; i++) + { + unknowns.Add(new UnknownSymbolDocument + { + Id = $"unknown-{i}", + SubjectKey = subjectKey, + LastAnalyzedAt = now.AddDays(-i * 2), + Flags = new UnknownFlags + { + NoProvenanceAnchor = i > 2, + VersionRange = i > 3 + }, + CreatedAt = now.AddDays(-i * 2 - 5) + }); + } + + await _unknownsRepo.UpsertAsync(subjectKey, unknowns, CancellationToken.None); + + var result = await decayService.ApplyDecayAsync(subjectKey, CancellationToken.None); + + Assert.Equal(6, result.ProcessedCount); + Assert.Equal(6, result.HotCount + result.WarmCount + result.ColdCount); + Assert.True(result.ColdCount >= 1); // At least the fresh one should be cold + } + + #endregion + + #region Test Infrastructure + + private sealed class MockTimeProvider : TimeProvider + { + private DateTimeOffset _now; + + public MockTimeProvider(DateTimeOffset now) => _now = now; + + public override DateTimeOffset GetUtcNow() => _now; + + public void Advance(TimeSpan duration) => _now = _now.Add(duration); + } + + private sealed class InMemoryUnknownsRepository : IUnknownsRepository + { + private readonly List _stored = new(); + + public Task UpsertAsync(string subjectKey, IEnumerable items, CancellationToken cancellationToken) + { + _stored.RemoveAll(x => x.SubjectKey == subjectKey); + _stored.AddRange(items); + return Task.CompletedTask; + } + + public Task> GetBySubjectAsync(string subjectKey, CancellationToken cancellationToken) + { + return Task.FromResult>( + _stored.Where(x => x.SubjectKey == subjectKey).ToList()); + } + + public Task CountBySubjectAsync(string subjectKey, CancellationToken 
cancellationToken) + { + return Task.FromResult(_stored.Count(x => x.SubjectKey == subjectKey)); + } + + public Task BulkUpdateAsync(IEnumerable items, CancellationToken cancellationToken) + { + foreach (var item in items) + { + var existing = _stored.FindIndex(x => x.Id == item.Id); + if (existing >= 0) + _stored[existing] = item; + else + _stored.Add(item); + } + return Task.CompletedTask; + } + + public Task> GetAllSubjectKeysAsync(CancellationToken cancellationToken) + { + return Task.FromResult>( + _stored.Select(x => x.SubjectKey).Distinct().ToList()); + } + + public Task> GetDueForRescanAsync(UnknownsBand band, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>( + _stored.Where(x => x.Band == band).Take(limit).ToList()); + } + } + + private sealed class InMemoryDeploymentRefsRepository : IDeploymentRefsRepository + { + private readonly Dictionary _counts = new(); + + public void SetDeploymentCount(string purl, int count) => _counts[purl] = count; + + public Task CountDeploymentsAsync(string purl, CancellationToken cancellationToken) + { + return Task.FromResult(_counts.TryGetValue(purl, out var count) ? 
count : 0); + } + + public Task> GetDeploymentIdsAsync(string purl, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>(Array.Empty()); + } + } + + private sealed class InMemoryGraphMetricsRepository : IGraphMetricsRepository + { + private readonly Dictionary _metrics = new(); + + public void SetMetrics(string symbolId, string callgraphId, GraphMetrics metrics) + { + _metrics[$"{symbolId}:{callgraphId}"] = metrics; + } + + public Task GetMetricsAsync(string symbolId, string callgraphId, CancellationToken cancellationToken) + { + _metrics.TryGetValue($"{symbolId}:{callgraphId}", out var metrics); + return Task.FromResult(metrics); + } + } + + #endregion +} diff --git a/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsIngestionServiceTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsIngestionServiceTests.cs index 4eec20997..4d0f0cea0 100644 --- a/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsIngestionServiceTests.cs +++ b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsIngestionServiceTests.cs @@ -78,5 +78,30 @@ public class UnknownsIngestionServiceTests { return Task.FromResult(Stored.Count); } + + public Task BulkUpdateAsync(IEnumerable items, CancellationToken cancellationToken) + { + foreach (var item in items) + { + var existing = Stored.FindIndex(x => x.Id == item.Id); + if (existing >= 0) + Stored[existing] = item; + else + Stored.Add(item); + } + return Task.CompletedTask; + } + + public Task> GetAllSubjectKeysAsync(CancellationToken cancellationToken) + { + return Task.FromResult>( + Stored.Select(x => x.SubjectKey).Distinct().ToList()); + } + + public Task> GetDueForRescanAsync(UnknownsBand band, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>( + Stored.Where(x => x.Band == band).Take(limit).ToList()); + } } } diff --git a/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsScoringServiceTests.cs b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsScoringServiceTests.cs new 
file mode 100644 index 000000000..e6b801845 --- /dev/null +++ b/src/Signals/__Tests/StellaOps.Signals.Tests/UnknownsScoringServiceTests.cs @@ -0,0 +1,534 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using MsOptions = Microsoft.Extensions.Options; +using StellaOps.Signals.Models; +using StellaOps.Signals.Options; +using StellaOps.Signals.Persistence; +using StellaOps.Signals.Services; +using Xunit; + +namespace StellaOps.Signals.Tests; + +public class UnknownsScoringServiceTests +{ + private readonly MockTimeProvider _timeProvider; + private readonly InMemoryUnknownsRepository _unknownsRepo; + private readonly InMemoryDeploymentRefsRepository _deploymentRefs; + private readonly InMemoryGraphMetricsRepository _graphMetrics; + private readonly UnknownsScoringOptions _defaultOptions; + + public UnknownsScoringServiceTests() + { + _timeProvider = new MockTimeProvider(new DateTimeOffset(2025, 12, 15, 12, 0, 0, TimeSpan.Zero)); + _unknownsRepo = new InMemoryUnknownsRepository(); + _deploymentRefs = new InMemoryDeploymentRefsRepository(); + _graphMetrics = new InMemoryGraphMetricsRepository(); + _defaultOptions = new UnknownsScoringOptions(); + } + + private UnknownsScoringService CreateService(UnknownsScoringOptions? options = null) + { + return new UnknownsScoringService( + _unknownsRepo, + _deploymentRefs, + _graphMetrics, + MsOptions.Options.Create(options ?? 
_defaultOptions), + _timeProvider, + NullLogger.Instance); + } + + #region Staleness Exponential Decay Tests + + [Fact] + public async Task ScoreUnknown_ExponentialDecay_FreshEvidence_LowStaleness() + { + // Fresh evidence (analyzed today) should have low staleness + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "unknown-1", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now, // Just analyzed + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-10) + }; + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // Staleness should be close to 0 for fresh evidence + Assert.True(scored.StalenessScore < 0.05, $"Expected staleness < 0.05, got {scored.StalenessScore}"); + Assert.Equal(0, scored.DaysSinceLastAnalysis); + } + + [Fact] + public async Task ScoreUnknown_ExponentialDecay_StaleEvidence_HighStaleness() + { + // Old evidence (14 days) should have high staleness + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "unknown-2", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now.AddDays(-14), // 14 days old (tau default) + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-20) + }; + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // At t = tau, staleness should be significant (normalized based on exponential decay) + Assert.True(scored.StalenessScore > 0.5, $"Expected staleness > 0.5 at tau, got {scored.StalenessScore}"); + Assert.Equal(14, scored.DaysSinceLastAnalysis); + } + + [Fact] + public async Task ScoreUnknown_ExponentialDecay_NeverAnalyzed_MaxStaleness() + { + // Never analyzed should have maximum staleness + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "unknown-3", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = null, 
// Never analyzed + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-30) + }; + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // Never analyzed = maximum staleness (1.0) + Assert.Equal(1.0, scored.StalenessScore); + Assert.Equal(_defaultOptions.StalenessMaxDays, scored.DaysSinceLastAnalysis); + } + + [Theory] + [InlineData(0, 0.0)] // Fresh + [InlineData(7, 0.35)] // Half tau - moderate staleness + [InlineData(14, 0.70)] // At tau - significant staleness + [InlineData(28, 0.95)] // 2x tau - near max staleness + public async Task ScoreUnknown_ExponentialDecay_VerifyFormula(int daysOld, double expectedMinStaleness) + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = $"unknown-{daysOld}", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now.AddDays(-daysOld), + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-daysOld - 5) + }; + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // Staleness should be at least the expected minimum + Assert.True(scored.StalenessScore >= expectedMinStaleness * 0.8, + $"At {daysOld} days, expected staleness >= {expectedMinStaleness * 0.8}, got {scored.StalenessScore}"); + Assert.Equal(daysOld, scored.DaysSinceLastAnalysis); + } + + #endregion + + #region Band Assignment Tests + + [Fact] + public async Task ScoreUnknown_BandAssignment_HotThreshold() + { + // High score should assign HOT band + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + // Create unknown with high uncertainty flags to boost score + var unknown = new UnknownSymbolDocument + { + Id = "hot-unknown", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now.AddDays(-14), + Flags = new UnknownFlags + { + NoProvenanceAnchor = true, // +0.30 + VersionRange = true, // +0.25 + ConflictingFeeds = true, // +0.20 + MissingVector = true 
// +0.15 + }, + CreatedAt = now.AddDays(-20) + }; + + // Set up deployments for popularity + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 100); + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // With high uncertainty (1.0) and high staleness, weighted score should hit HOT + Assert.Equal(UnknownsBand.Hot, scored.Band); + Assert.True(scored.Score >= _defaultOptions.HotThreshold, + $"Expected score >= {_defaultOptions.HotThreshold} for HOT, got {scored.Score}"); + } + + [Fact] + public async Task ScoreUnknown_BandAssignment_WarmThreshold() + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + // Create unknown with moderate factors + var unknown = new UnknownSymbolDocument + { + Id = "warm-unknown", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags + { + NoProvenanceAnchor = true, // +0.30 + VersionRange = true // +0.25 + }, + CreatedAt = now.AddDays(-10) + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 50); + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // Should be in WARM band + Assert.Equal(UnknownsBand.Warm, scored.Band); + Assert.True(scored.Score >= _defaultOptions.WarmThreshold, + $"Expected score >= {_defaultOptions.WarmThreshold} for WARM, got {scored.Score}"); + Assert.True(scored.Score < _defaultOptions.HotThreshold, + $"Expected score < {_defaultOptions.HotThreshold} for WARM, got {scored.Score}"); + } + + [Fact] + public async Task ScoreUnknown_BandAssignment_ColdThreshold() + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + // Create unknown with low factors + var unknown = new UnknownSymbolDocument + { + Id = "cold-unknown", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now, // Fresh evidence + Flags = new UnknownFlags(), // No flags + CreatedAt = 
now.AddDays(-1) + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 1); + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + // Should be in COLD band with minimal factors + Assert.Equal(UnknownsBand.Cold, scored.Band); + Assert.True(scored.Score < _defaultOptions.WarmThreshold, + $"Expected score < {_defaultOptions.WarmThreshold} for COLD, got {scored.Score}"); + } + + [Fact] + public async Task ScoreUnknown_BandAssignment_CustomThresholds() + { + var customOptions = new UnknownsScoringOptions + { + HotThreshold = 0.80, + WarmThreshold = 0.50 + }; + + var service = CreateService(customOptions); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "custom-unknown", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags + { + NoProvenanceAnchor = true, + VersionRange = true + }, + CreatedAt = now.AddDays(-10) + }; + + var scored = await service.ScoreUnknownAsync(unknown, customOptions, CancellationToken.None); + + // With custom thresholds, verify correct band assignment + if (scored.Score >= 0.80) + Assert.Equal(UnknownsBand.Hot, scored.Band); + else if (scored.Score >= 0.50) + Assert.Equal(UnknownsBand.Warm, scored.Band); + else + Assert.Equal(UnknownsBand.Cold, scored.Band); + } + + #endregion + + #region Weight Formula Tests + + [Fact] + public async Task ScoreUnknown_WeightedFormula_VerifyComponents() + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "formula-test", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + SymbolId = "sym-1", + CallgraphId = "cg-1", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags { NoProvenanceAnchor = true }, + CreatedAt = now.AddDays(-10) + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 50); + _graphMetrics.SetMetrics("sym-1", "cg-1", new GraphMetrics(Degree: 10, 
Betweenness: 500.0));

        var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None);

        // The normalization trace must expose every weight that went into the blended score.
        Assert.NotNull(scored.NormalizationTrace);
        Assert.Equal(_defaultOptions.WeightPopularity, scored.NormalizationTrace!.Weights["wP"]);
        Assert.Equal(_defaultOptions.WeightExploitPotential, scored.NormalizationTrace.Weights["wE"]);
        Assert.Equal(_defaultOptions.WeightUncertainty, scored.NormalizationTrace.Weights["wU"]);
        Assert.Equal(_defaultOptions.WeightCentrality, scored.NormalizationTrace.Weights["wC"]);
        Assert.Equal(_defaultOptions.WeightStaleness, scored.NormalizationTrace.Weights["wS"]);

        // Every component score must be normalized into [0, 1].
        Assert.InRange(scored.PopularityScore, 0.0, 1.0);
        Assert.InRange(scored.ExploitPotentialScore, 0.0, 1.0);
        Assert.InRange(scored.UncertaintyScore, 0.0, 1.0);
        Assert.InRange(scored.CentralityScore, 0.0, 1.0);
        Assert.InRange(scored.StalenessScore, 0.0, 1.0);

        // The blended score is clamped to [0, 1] as well.
        Assert.InRange(scored.Score, 0.0, 1.0);
    }

    [Fact]
    public void ScoreUnknown_WeightedFormula_WeightsSumToOne()
    {
        // Default weights must form a convex combination (sum to 1.0, 5 decimal places).
        // Fixed: was `async Task` with no await (CS1998); a synchronous [Fact] is correct here.
        var sum = _defaultOptions.WeightPopularity
            + _defaultOptions.WeightExploitPotential
            + _defaultOptions.WeightUncertainty
            + _defaultOptions.WeightCentrality
            + _defaultOptions.WeightStaleness;

        Assert.Equal(1.0, sum, 5);
    }

    #endregion

    #region Rescan Scheduling Tests

    [Fact]
    public async Task ScoreUnknown_RescanScheduling_HotBand()
    {
        var service = CreateService();
        var now = _timeProvider.GetUtcNow();

        var unknown = new UnknownSymbolDocument
        {
            Id = "hot-rescan",
            SubjectKey = "test|1.0.0",
            // Fixed: Purl was missing, so the deployment count configured below (keyed by
            // this purl) was never picked up and the popularity factor stayed at zero.
            Purl = "pkg:npm/test@1.0.0",
            LastAnalyzedAt = now.AddDays(-14),
            Flags = new UnknownFlags
            {
                NoProvenanceAnchor = true,
                VersionRange = true,
                ConflictingFeeds = true,
                MissingVector = true
            },
            CreatedAt = now.AddDays(-20)
        };
_deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 100); + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + if (scored.Band == UnknownsBand.Hot) + { + var expectedRescan = now.AddMinutes(_defaultOptions.HotRescanMinutes); + Assert.Equal(expectedRescan, scored.NextScheduledRescan); + } + } + + [Fact] + public async Task ScoreUnknown_RescanScheduling_ColdBand() + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown = new UnknownSymbolDocument + { + Id = "cold-rescan", + SubjectKey = "test|1.0.0", + LastAnalyzedAt = now, + Flags = new UnknownFlags(), + CreatedAt = now.AddDays(-1) + }; + + var scored = await service.ScoreUnknownAsync(unknown, _defaultOptions, CancellationToken.None); + + Assert.Equal(UnknownsBand.Cold, scored.Band); + var expectedRescan = now.AddDays(_defaultOptions.ColdRescanDays); + Assert.Equal(expectedRescan, scored.NextScheduledRescan); + } + + #endregion + + #region Determinism Tests + + [Fact] + public async Task ScoreUnknown_Determinism_SameInputsSameOutput() + { + var service = CreateService(); + var now = _timeProvider.GetUtcNow(); + + var unknown1 = new UnknownSymbolDocument + { + Id = "determinism-1", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags { NoProvenanceAnchor = true }, + CreatedAt = now.AddDays(-10) + }; + + var unknown2 = new UnknownSymbolDocument + { + Id = "determinism-2", + SubjectKey = "test|1.0.0", + Purl = "pkg:npm/test@1.0.0", + LastAnalyzedAt = now.AddDays(-7), + Flags = new UnknownFlags { NoProvenanceAnchor = true }, + CreatedAt = now.AddDays(-10) + }; + + _deploymentRefs.SetDeploymentCount("pkg:npm/test@1.0.0", 50); + + var scored1 = await service.ScoreUnknownAsync(unknown1, _defaultOptions, CancellationToken.None); + var scored2 = await service.ScoreUnknownAsync(unknown2, _defaultOptions, CancellationToken.None); + + // Same inputs must produce 
// identical scores
        Assert.Equal(scored1.Score, scored2.Score);
        Assert.Equal(scored1.Band, scored2.Band);
        Assert.Equal(scored1.PopularityScore, scored2.PopularityScore);
        Assert.Equal(scored1.StalenessScore, scored2.StalenessScore);
        Assert.Equal(scored1.UncertaintyScore, scored2.UncertaintyScore);
    }

    #endregion

    #region Test Infrastructure

    /// <summary>
    /// Deterministic clock for tests; time only moves when <see cref="Advance"/> is called.
    /// </summary>
    private sealed class MockTimeProvider : TimeProvider
    {
        private DateTimeOffset _now;

        public MockTimeProvider(DateTimeOffset now) => _now = now;

        public override DateTimeOffset GetUtcNow() => _now;

        public void Advance(TimeSpan duration) => _now = _now.Add(duration);
    }

    /// <summary>
    /// In-memory stand-in for the unknowns store.
    /// NOTE(review): generic type arguments were stripped in this copy of the file; they are
    /// reconstructed here from usage — confirm against the IUnknownsRepository declaration.
    /// </summary>
    private sealed class InMemoryUnknownsRepository : IUnknownsRepository
    {
        private readonly List<UnknownSymbolDocument> _stored = new();

        public Task UpsertAsync(string subjectKey, IEnumerable<UnknownSymbolDocument> items, CancellationToken cancellationToken)
        {
            // Upsert semantics: replace the subject's current set wholesale.
            _stored.RemoveAll(x => x.SubjectKey == subjectKey);
            _stored.AddRange(items);
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<UnknownSymbolDocument>> GetBySubjectAsync(string subjectKey, CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<UnknownSymbolDocument>>(
                _stored.Where(x => x.SubjectKey == subjectKey).ToList());
        }

        public Task<int> CountBySubjectAsync(string subjectKey, CancellationToken cancellationToken)
        {
            return Task.FromResult(_stored.Count(x => x.SubjectKey == subjectKey));
        }

        public Task BulkUpdateAsync(IEnumerable<UnknownSymbolDocument> items, CancellationToken cancellationToken)
        {
            // Update-or-insert each item individually, matched by Id.
            foreach (var item in items)
            {
                var existing = _stored.FindIndex(x => x.Id == item.Id);
                if (existing >= 0)
                    _stored[existing] = item;
                else
                    _stored.Add(item);
            }
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<string>> GetAllSubjectKeysAsync(CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<string>>(
                _stored.Select(x => x.SubjectKey).Distinct().ToList());
        }

        public Task<IReadOnlyList<UnknownSymbolDocument>> GetDueForRescanAsync(UnknownsBand band, int limit, CancellationToken cancellationToken)
        {
            return Task.FromResult<IReadOnlyList<UnknownSymbolDocument>>(
                _stored.Where(x => x.Band ==
band).Take(limit).ToList()); + } + } + + private sealed class InMemoryDeploymentRefsRepository : IDeploymentRefsRepository + { + private readonly Dictionary _counts = new(); + + public void SetDeploymentCount(string purl, int count) => _counts[purl] = count; + + public Task CountDeploymentsAsync(string purl, CancellationToken cancellationToken) + { + return Task.FromResult(_counts.TryGetValue(purl, out var count) ? count : 0); + } + + public Task> GetDeploymentIdsAsync(string purl, int limit, CancellationToken cancellationToken) + { + return Task.FromResult>(Array.Empty()); + } + } + + private sealed class InMemoryGraphMetricsRepository : IGraphMetricsRepository + { + private readonly Dictionary _metrics = new(); + + public void SetMetrics(string symbolId, string callgraphId, GraphMetrics metrics) + { + _metrics[$"{symbolId}:{callgraphId}"] = metrics; + } + + public Task GetMetricsAsync(string symbolId, string callgraphId, CancellationToken cancellationToken) + { + _metrics.TryGetValue($"{symbolId}:{callgraphId}", out var metrics); + return Task.FromResult(metrics); + } + } + + #endregion +} diff --git a/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TriageMetrics.cs b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TriageMetrics.cs new file mode 100644 index 000000000..eaf550290 --- /dev/null +++ b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TriageMetrics.cs @@ -0,0 +1,183 @@ +using System.Diagnostics.Metrics; + +namespace StellaOps.Telemetry.Core.Triage; + +/// +/// Metrics for triage workflow observability (TTFS, clicks-to-closure, evidence completeness). +/// +public static class TriageMetrics +{ + /// + /// Meter name for triage metrics. + /// + public const string MeterName = "StellaOps.Triage"; + + private static readonly Meter Meter = new(MeterName, "1.0.0"); + + // TTFS Metrics + + /// + /// Time to skeleton UI render in seconds. 
+ /// + public static readonly Histogram TtfsSkeletonSeconds = Meter.CreateHistogram( + "stellaops_ttfs_skeleton_seconds", + unit: "s", + description: "Time to skeleton UI render"); + + /// + /// Time to first evidence pill (primary TTFS metric) in seconds. + /// + public static readonly Histogram TtfsFirstEvidenceSeconds = Meter.CreateHistogram( + "stellaops_ttfs_first_evidence_seconds", + unit: "s", + description: "Time to first evidence pill (primary TTFS)"); + + /// + /// Time to full evidence load in seconds. + /// + public static readonly Histogram TtfsFullEvidenceSeconds = Meter.CreateHistogram( + "stellaops_ttfs_full_evidence_seconds", + unit: "s", + description: "Time to full evidence load"); + + // Clicks-to-Closure + + /// + /// Interactions required to complete triage decision. + /// + public static readonly Histogram ClicksToClosure = Meter.CreateHistogram( + "stellaops_clicks_to_closure", + unit: "{clicks}", + description: "Interactions required to complete triage decision"); + + // Evidence Completeness + + /// + /// Evidence completeness at decision time (0-4). + /// + public static readonly Histogram EvidenceCompleteness = Meter.CreateHistogram( + "stellaops_evidence_completeness_score", + unit: "{score}", + description: "Evidence completeness at decision time (0-4)"); + + /// + /// Count of evidence available by type at decision time. + /// + public static readonly Counter EvidenceByType = Meter.CreateCounter( + "stellaops_evidence_available_total", + description: "Count of evidence available by type at decision time"); + + // Decision Metrics + + /// + /// Total triage decisions recorded. + /// + public static readonly Counter DecisionsTotal = Meter.CreateCounter( + "stellaops_triage_decisions_total", + description: "Total triage decisions recorded"); + + /// + /// Total time from alert open to decision in seconds. 
+ /// + public static readonly Histogram DecisionDurationSeconds = Meter.CreateHistogram( + "stellaops_triage_decision_duration_seconds", + unit: "s", + description: "Total time from alert open to decision"); + + // Budget Violations + + /// + /// Count of performance budget violations. + /// + public static readonly Counter BudgetViolations = Meter.CreateCounter( + "stellaops_performance_budget_violations_total", + description: "Count of performance budget violations"); +} + +/// +/// Evidence bitset for completeness tracking (C# equivalent). +/// +public readonly struct EvidenceBitset +{ + /// + /// Reachability evidence bit (1). + /// + public const int Reachability = 1 << 0; + + /// + /// Callstack evidence bit (2). + /// + public const int Callstack = 1 << 1; + + /// + /// Provenance evidence bit (4). + /// + public const int Provenance = 1 << 2; + + /// + /// VEX evidence bit (8). + /// + public const int Vex = 1 << 3; + + /// + /// Gets the bitset value. + /// + public int Value { get; } + + /// + /// Initializes a new EvidenceBitset with the specified value. + /// + public EvidenceBitset(int value) + { + Value = value; + } + + /// + /// Gets whether reachability evidence is present. + /// + public bool HasReachability => (Value & Reachability) != 0; + + /// + /// Gets whether callstack evidence is present. + /// + public bool HasCallstack => (Value & Callstack) != 0; + + /// + /// Gets whether provenance evidence is present. + /// + public bool HasProvenance => (Value & Provenance) != 0; + + /// + /// Gets whether VEX evidence is present. + /// + public bool HasVex => (Value & Vex) != 0; + + /// + /// Gets the completeness score (0-4). + /// + public int CompletenessScore + { + get + { + int score = 0; + if (HasReachability) score++; + if (HasCallstack) score++; + if (HasProvenance) score++; + if (HasVex) score++; + return score; + } + } + + /// + /// Creates an EvidenceBitset from individual evidence flags. 
+ /// + public static EvidenceBitset From(bool reachability, bool callstack, bool provenance, bool vex) + { + int value = 0; + if (reachability) value |= Reachability; + if (callstack) value |= Callstack; + if (provenance) value |= Provenance; + if (vex) value |= Vex; + return new EvidenceBitset(value); + } +} diff --git a/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsEvent.cs b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsEvent.cs new file mode 100644 index 000000000..3bec8e59a --- /dev/null +++ b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsEvent.cs @@ -0,0 +1,130 @@ +using System.Text.Json.Serialization; + +namespace StellaOps.Telemetry.Core.Triage; + +/// +/// TTFS telemetry event from frontend. +/// +public sealed class TtfsEvent +{ + /// + /// Event type: ttfs.skeleton, ttfs.first_evidence, ttfs.full_evidence, decision.recorded, budget.violation. + /// + [JsonPropertyName("event_type")] + public required string EventType { get; init; } + + /// + /// Alert identifier. + /// + [JsonPropertyName("alert_id")] + public required string AlertId { get; init; } + + /// + /// Duration in milliseconds. + /// + [JsonPropertyName("duration_ms")] + public double DurationMs { get; init; } + + /// + /// Evidence type (for first_evidence events). + /// + [JsonPropertyName("evidence_type")] + public string? EvidenceType { get; init; } + + /// + /// Evidence completeness score (0-4). + /// + [JsonPropertyName("completeness_score")] + public int CompletenessScore { get; init; } + + /// + /// Click count for decision events. + /// + [JsonPropertyName("click_count")] + public int ClickCount { get; init; } + + /// + /// Decision status for decision events. + /// + [JsonPropertyName("decision_status")] + public string? DecisionStatus { get; init; } + + /// + /// Phase for budget violation events. + /// + [JsonPropertyName("phase")] + public string? 
Phase { get; init; }

    /// <summary>
    /// Budget limit in milliseconds (for budget violation events).
    /// </summary>
    [JsonPropertyName("budget")]
    public double Budget { get; init; }

    /// <summary>
    /// Evidence bitset value (reachability/callstack/provenance/VEX flags packed as bits).
    /// </summary>
    [JsonPropertyName("evidence_bitset")]
    public int EvidenceBitset { get; init; }

    /// <summary>
    /// Client timestamp (UTC).
    /// </summary>
    [JsonPropertyName("timestamp")]
    public DateTimeOffset Timestamp { get; init; }
}

/// <summary>
/// Batch of TTFS events for ingestion.
/// </summary>
public sealed class TtfsEventBatch
{
    /// <summary>
    /// Events to ingest. NOTE(review): the element type was stripped in this copy;
    /// restored as <see cref="TtfsEvent"/> from usage in TtfsIngestionService.IngestBatch.
    /// </summary>
    [JsonPropertyName("events")]
    public required IReadOnlyList<TtfsEvent> Events { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenant_id")]
    public string? TenantId { get; init; }

    /// <summary>
    /// Session identifier.
    /// </summary>
    [JsonPropertyName("session_id")]
    public string? SessionId { get; init; }
}

/// <summary>
/// Known TTFS event types (wire-level discriminators).
/// </summary>
public static class TtfsEventType
{
    /// <summary>
    /// Skeleton UI rendered.
    /// </summary>
    public const string Skeleton = "ttfs.skeleton";

    /// <summary>
    /// First evidence pill rendered.
    /// </summary>
    public const string FirstEvidence = "ttfs.first_evidence";

    /// <summary>
    /// Full evidence loaded.
    /// </summary>
    public const string FullEvidence = "ttfs.full_evidence";

    /// <summary>
    /// Decision recorded.
    /// </summary>
    public const string DecisionRecorded = "decision.recorded";

    /// <summary>
    /// Performance budget violated.
+ /// + public const string BudgetViolation = "budget.violation"; +} diff --git a/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsIngestionService.cs b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsIngestionService.cs new file mode 100644 index 000000000..1bc129fc1 --- /dev/null +++ b/src/Telemetry/StellaOps.Telemetry.Core/StellaOps.Telemetry.Core/Triage/TtfsIngestionService.cs @@ -0,0 +1,216 @@ +using System.Diagnostics; +using System.Diagnostics.Metrics; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Telemetry.Core.Triage; + +/// +/// Service for ingesting TTFS telemetry events. +/// +public sealed class TtfsIngestionService +{ + private readonly ILogger _logger; + + /// + /// Initializes a new instance of the class. + /// + public TtfsIngestionService(ILogger logger) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + /// + /// Ingests a batch of TTFS events. + /// + public void IngestBatch(TtfsEventBatch batch) + { + ArgumentNullException.ThrowIfNull(batch); + ArgumentNullException.ThrowIfNull(batch.Events); + + foreach (var evt in batch.Events) + { + IngestEvent(evt, batch.TenantId); + } + } + + /// + /// Ingests a single TTFS event. + /// + public void IngestEvent(TtfsEvent evt, string? 
tenantId = null)
    {
        ArgumentNullException.ThrowIfNull(evt);

        // Base dimensions shared by every metric emitted for this event.
        var tags = new TagList
        {
            { "alert_id", evt.AlertId }
        };
        if (!string.IsNullOrEmpty(tenantId))
        {
            tags.Add("tenant_id", tenantId);
        }

        // Dispatch on the wire-level event type; unrecognized types are logged and dropped.
        switch (evt.EventType)
        {
            case TtfsEventType.Skeleton:
                RecordSkeletonEvent(evt, tags);
                break;

            case TtfsEventType.FirstEvidence:
                RecordFirstEvidenceEvent(evt, tags);
                break;

            case TtfsEventType.FullEvidence:
                RecordFullEvidenceEvent(evt, tags);
                break;

            case TtfsEventType.DecisionRecorded:
                RecordDecisionEvent(evt, tags);
                break;

            case TtfsEventType.BudgetViolation:
                RecordBudgetViolation(evt, tags);
                break;

            default:
                _logger.LogWarning("Unknown TTFS event type: {EventType}", evt.EventType);
                break;
        }
    }

    // Records time-to-skeleton and reports a budget violation past the 200 ms budget.
    private void RecordSkeletonEvent(TtfsEvent evt, TagList tags)
    {
        var seconds = evt.DurationMs / 1000.0;
        TriageMetrics.TtfsSkeletonSeconds.Record(seconds, tags);

        _logger.LogDebug(
            "TTFS skeleton for alert {AlertId}: {Duration:F3}s",
            evt.AlertId, seconds);

        // Check budget (200ms)
        if (evt.DurationMs > 200)
        {
            RecordBudgetViolation(new TtfsEvent
            {
                EventType = TtfsEventType.BudgetViolation,
                AlertId = evt.AlertId,
                Phase = "skeleton",
                DurationMs = evt.DurationMs,
                Budget = 200,
                Timestamp = evt.Timestamp
            }, tags);
        }
    }

    // Records time-to-first-evidence (the primary TTFS metric) and checks its 500 ms budget.
    private void RecordFirstEvidenceEvent(TtfsEvent evt, TagList tags)
    {
        var seconds = evt.DurationMs / 1000.0;

        // TagList is a struct, so this extra tag stays local to this method's copy.
        if (!string.IsNullOrEmpty(evt.EvidenceType))
        {
            tags.Add("evidence_type", evt.EvidenceType);
        }

        TriageMetrics.TtfsFirstEvidenceSeconds.Record(seconds, tags);

        _logger.LogDebug(
            "TTFS first evidence for alert {AlertId}: {Duration:F3}s, type={Type}",
            evt.AlertId, seconds, evt.EvidenceType);

        // Check budget (500ms for first pill, 1500ms for p95)
        if (evt.DurationMs > 500)
        {
            RecordBudgetViolation(new TtfsEvent
            {
                EventType = TtfsEventType.BudgetViolation,
                AlertId = evt.AlertId,
+ Phase = "first_evidence", + DurationMs = evt.DurationMs, + Budget = 500, + Timestamp = evt.Timestamp + }, tags); + } + } + + private void RecordFullEvidenceEvent(TtfsEvent evt, TagList tags) + { + var durationSeconds = evt.DurationMs / 1000.0; + tags.Add("completeness", evt.CompletenessScore); + + TriageMetrics.TtfsFullEvidenceSeconds.Record(durationSeconds, tags); + TriageMetrics.EvidenceCompleteness.Record(evt.CompletenessScore, tags); + + // Record individual evidence types + var bitset = new EvidenceBitset(evt.EvidenceBitset); + if (bitset.HasReachability) + { + TriageMetrics.EvidenceByType.Add(1, + new KeyValuePair("evidence_type", "reachability")); + } + if (bitset.HasCallstack) + { + TriageMetrics.EvidenceByType.Add(1, + new KeyValuePair("evidence_type", "callstack")); + } + if (bitset.HasProvenance) + { + TriageMetrics.EvidenceByType.Add(1, + new KeyValuePair("evidence_type", "provenance")); + } + if (bitset.HasVex) + { + TriageMetrics.EvidenceByType.Add(1, + new KeyValuePair("evidence_type", "vex")); + } + + _logger.LogDebug( + "TTFS full evidence for alert {AlertId}: {Duration:F3}s, completeness={Score}", + evt.AlertId, durationSeconds, evt.CompletenessScore); + } + + private void RecordDecisionEvent(TtfsEvent evt, TagList tags) + { + var durationSeconds = evt.DurationMs / 1000.0; + + if (!string.IsNullOrEmpty(evt.DecisionStatus)) + { + tags.Add("decision_status", evt.DecisionStatus); + } + + TriageMetrics.ClicksToClosure.Record(evt.ClickCount, tags); + TriageMetrics.DecisionDurationSeconds.Record(durationSeconds, tags); + TriageMetrics.DecisionsTotal.Add(1, tags); + + _logger.LogInformation( + "Triage decision for alert {AlertId}: status={Status}, clicks={Clicks}, duration={Duration:F3}s", + evt.AlertId, evt.DecisionStatus, evt.ClickCount, durationSeconds); + + // Check clicks budget (median < 6) + if (evt.ClickCount > 6) + { + TriageMetrics.BudgetViolations.Add(1, + new KeyValuePair("phase", "clicks_to_closure"), + new KeyValuePair("budget", 6)); + + 
_logger.LogWarning( + "Clicks-to-closure budget exceeded for alert {AlertId}: {Clicks} clicks (budget: 6)", + evt.AlertId, evt.ClickCount); + } + } + + private void RecordBudgetViolation(TtfsEvent evt, TagList baseTags) + { + var tags = new TagList(); + foreach (var tag in baseTags) + { + tags.Add(tag); + } + tags.Add("phase", evt.Phase ?? "unknown"); + tags.Add("budget", evt.Budget); + + TriageMetrics.BudgetViolations.Add(1, tags); + + _logger.LogWarning( + "Performance budget exceeded for alert {AlertId}: phase={Phase}, actual={Actual:F0}ms, budget={Budget:F0}ms", + evt.AlertId, evt.Phase, evt.DurationMs, evt.Budget); + } +} diff --git a/src/Web/StellaOps.Web/src/app/features/triage/components/decision-drawer/decision-drawer.component.ts b/src/Web/StellaOps.Web/src/app/features/triage/components/decision-drawer/decision-drawer.component.ts new file mode 100644 index 000000000..69b832eb1 --- /dev/null +++ b/src/Web/StellaOps.Web/src/app/features/triage/components/decision-drawer/decision-drawer.component.ts @@ -0,0 +1,446 @@ +import { + Component, + Input, + Output, + EventEmitter, + HostListener, + signal, + computed, +} from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { FormsModule } from '@angular/forms'; + +export type DecisionStatus = 'affected' | 'not_affected' | 'under_investigation'; + +export interface DecisionFormData { + status: DecisionStatus; + reasonCode: string; + reasonText?: string; +} + +export interface AlertSummary { + id: string; + artifactId: string; + vulnId: string; + severity: string; +} + +@Component({ + selector: 'app-decision-drawer', + standalone: true, + imports: [CommonModule, FormsModule], + template: ` + + + + + `, + styles: [` + .decision-drawer { + position: fixed; + right: 0; + top: 0; + bottom: 0; + width: 360px; + background: var(--surface-color, #fff); + border-left: 1px solid var(--border-color, #e0e0e0); + box-shadow: -4px 0 16px rgba(0,0,0,0.1); + display: flex; + flex-direction: column; + 
transform: translateX(100%); + transition: transform 0.3s ease; + z-index: 101; + } + + .decision-drawer.open { + transform: translateX(0); + } + + .backdrop { + position: fixed; + inset: 0; + background: rgba(0,0,0,0.3); + z-index: 100; + } + + header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 16px; + border-bottom: 1px solid var(--border-color, #e0e0e0); + } + + h3 { + margin: 0; + font-size: 18px; + } + + .close-btn { + background: none; + border: none; + font-size: 24px; + cursor: pointer; + padding: 4px 8px; + line-height: 1; + color: var(--text-secondary, #666); + } + + .close-btn:hover { + color: var(--text-primary, #333); + } + + section { + padding: 16px; + border-bottom: 1px solid var(--border-color, #e0e0e0); + } + + h4 { + margin: 0 0 12px 0; + font-size: 14px; + color: var(--text-secondary, #666); + font-weight: 600; + } + + .radio-group { + display: flex; + flex-direction: column; + gap: 8px; + } + + .radio-option { + display: flex; + align-items: center; + gap: 8px; + padding: 12px; + border: 1px solid var(--border-color, #e0e0e0); + border-radius: 8px; + cursor: pointer; + transition: all 0.2s; + } + + .radio-option:hover { + background: var(--surface-variant, #f5f5f5); + } + + .radio-option.selected { + border-color: var(--primary-color, #1976d2); + background: var(--primary-bg, #e3f2fd); + } + + .radio-option input { + position: absolute; + opacity: 0; + width: 0; + height: 0; + } + + .key-hint { + display: inline-flex; + align-items: center; + justify-content: center; + width: 24px; + height: 24px; + background: var(--surface-variant, #f5f5f5); + border-radius: 4px; + font-size: 12px; + font-weight: 600; + color: var(--text-secondary, #666); + } + + .radio-option.selected .key-hint { + background: var(--primary-color, #1976d2); + color: white; + } + + .reason-select { + width: 100%; + padding: 10px; + border: 1px solid var(--border-color, #e0e0e0); + border-radius: 4px; + margin-bottom: 8px; + 
font-size: 14px; + background: var(--surface-color, #fff); + } + + .reason-select:focus { + outline: 2px solid var(--primary-color, #1976d2); + outline-offset: 2px; + } + + .reason-text { + width: 100%; + padding: 10px; + border: 1px solid var(--border-color, #e0e0e0); + border-radius: 4px; + resize: vertical; + font-size: 14px; + font-family: inherit; + box-sizing: border-box; + } + + .reason-text:focus { + outline: 2px solid var(--primary-color, #1976d2); + outline-offset: 2px; + } + + .summary-list { + display: grid; + grid-template-columns: auto 1fr; + gap: 4px 12px; + font-size: 13px; + margin: 0; + } + + .summary-list dt { + color: var(--text-secondary, #666); + } + + .summary-list dd { + margin: 0; + color: var(--text-primary, #333); + } + + .truncate { + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + max-width: 200px; + } + + .hash { + font-family: ui-monospace, monospace; + font-size: 11px; + word-break: break-all; + } + + footer { + margin-top: auto; + padding: 16px; + display: flex; + gap: 8px; + justify-content: flex-end; + border-top: 1px solid var(--border-color, #e0e0e0); + } + + .btn { + padding: 10px 16px; + border-radius: 4px; + cursor: pointer; + font-size: 14px; + font-weight: 500; + transition: all 0.2s; + } + + .btn-primary { + background: var(--primary-color, #1976d2); + color: white; + border: none; + } + + .btn-primary:hover:not(:disabled) { + background: var(--primary-dark, #1565c0); + } + + .btn-secondary { + background: transparent; + border: 1px solid var(--border-color, #e0e0e0); + color: var(--text-primary, #333); + } + + .btn-secondary:hover { + background: var(--surface-variant, #f5f5f5); + } + + .btn:disabled { + opacity: 0.5; + cursor: not-allowed; + } + + .btn:focus { + outline: 2px solid var(--primary-color, #1976d2); + outline-offset: 2px; + } + `] +}) +export class DecisionDrawerComponent { + @Input() alert?: AlertSummary; + @Input() isOpen = false; + @Input() evidenceHash = ''; + @Input() policyVersion 
= ''; + + @Output() close = new EventEmitter(); + @Output() decisionSubmit = new EventEmitter(); + + formData = signal({ + status: 'under_investigation', + reasonCode: '', + reasonText: '', + }); + + @HostListener('document:keydown', ['$event']) + handleKeydown(event: KeyboardEvent): void { + if (!this.isOpen) return; + + // Escape to close + if (event.key === 'Escape') { + event.preventDefault(); + this.close.emit(); + return; + } + + // Don't interfere with typing in text fields + const target = event.target as HTMLElement; + if (target.tagName === 'INPUT' || target.tagName === 'TEXTAREA' || target.tagName === 'SELECT') { + return; + } + + // Quick status keys + switch (event.key.toLowerCase()) { + case 'a': + event.preventDefault(); + this.setStatus('affected'); + break; + case 'n': + event.preventDefault(); + this.setStatus('not_affected'); + break; + case 'u': + event.preventDefault(); + this.setStatus('under_investigation'); + break; + } + } + + setStatus(status: DecisionStatus): void { + this.formData.update((f) => ({ ...f, status })); + } + + setReasonCode(reasonCode: string): void { + this.formData.update((f) => ({ ...f, reasonCode })); + } + + setReasonText(reasonText: string): void { + this.formData.update((f) => ({ ...f, reasonText })); + } + + isValid(): boolean { + const data = this.formData(); + return !!data.status && !!data.reasonCode; + } + + submitDecision(): void { + if (this.isValid()) { + this.decisionSubmit.emit(this.formData()); + } + } + + resetForm(): void { + this.formData.set({ + status: 'under_investigation', + reasonCode: '', + reasonText: '', + }); + } +} diff --git a/src/Web/StellaOps.Web/src/app/features/triage/components/evidence-pills/evidence-pills.component.ts b/src/Web/StellaOps.Web/src/app/features/triage/components/evidence-pills/evidence-pills.component.ts new file mode 100644 index 000000000..4b7d6a6dc --- /dev/null +++ b/src/Web/StellaOps.Web/src/app/features/triage/components/evidence-pills/evidence-pills.component.ts @@ 
-0,0 +1,173 @@ +import { Component, Input, Output, EventEmitter, computed, signal } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { EvidenceBundle, EvidenceStatus, EvidenceBitset } from '../../models/evidence.model'; + +@Component({ + selector: 'app-evidence-pills', + standalone: true, + imports: [CommonModule], + template: ` +
+ + + + + + + + +
+ {{ completenessScore() }}/4 +
+
+ `, + styles: [` + .evidence-pills { + display: flex; + gap: 8px; + align-items: center; + padding: 8px 0; + border-bottom: 1px solid var(--border-color, #e0e0e0); + } + + .pill { + display: flex; + align-items: center; + gap: 4px; + padding: 4px 12px; + border-radius: 16px; + font-size: 13px; + cursor: pointer; + transition: all 0.2s; + background: var(--surface-variant, #f5f5f5); + border: 1px solid transparent; + color: var(--text-primary, #333); + } + + .pill.available { + background: var(--success-bg, #e8f5e9); + color: var(--success-text, #2e7d32); + border-color: var(--success-border, #a5d6a7); + } + + .pill.loading { + background: var(--warning-bg, #fff3e0); + color: var(--warning-text, #ef6c00); + border-color: var(--warning-border, #ffcc80); + } + + .pill.unavailable { + background: var(--error-bg, #ffebee); + color: var(--error-text, #c62828); + border-color: var(--error-border, #ef9a9a); + opacity: 0.7; + } + + .pill.pending { + background: var(--info-bg, #e3f2fd); + color: var(--info-text, #1565c0); + border-color: var(--info-border, #90caf9); + } + + .pill:hover { + transform: translateY(-1px); + box-shadow: 0 2px 4px rgba(0,0,0,0.1); + } + + .pill:focus { + outline: 2px solid var(--primary-color, #1976d2); + outline-offset: 2px; + } + + .icon { + font-size: 14px; + } + + .label { + font-weight: 500; + } + + .completeness-badge { + margin-left: auto; + font-weight: 600; + color: var(--text-secondary, #666); + padding: 4px 8px; + background: var(--surface-variant, #f5f5f5); + border-radius: 12px; + font-size: 12px; + } + `] +}) +export class EvidencePillsComponent { + private _evidence = signal(undefined); + + @Input() + set evidence(value: EvidenceBundle | undefined) { + this._evidence.set(value); + } + + @Output() pillClick = new EventEmitter<'reachability' | 'callstack' | 'provenance' | 'vex'>(); + + reachabilityStatus = computed(() => this._evidence()?.reachability?.status ?? 
'unavailable'); + callstackStatus = computed(() => this._evidence()?.callstack?.status ?? 'unavailable'); + provenanceStatus = computed(() => this._evidence()?.provenance?.status ?? 'unavailable'); + vexStatus = computed(() => this._evidence()?.vex?.status ?? 'unavailable'); + + completenessScore = computed(() => { + const bundle = this._evidence(); + return EvidenceBitset.fromBundle(bundle).completenessScore; + }); + + getIcon(status: EvidenceStatus): string { + switch (status) { + case 'available': + return '\u2713'; // checkmark + case 'loading': + return '\u23F3'; // hourglass + case 'unavailable': + return '\u2717'; // X mark + case 'error': + return '\u26A0'; // warning + case 'pending_enrichment': + return '\u2026'; // ellipsis + default: + return '?'; + } + } +} diff --git a/src/Web/StellaOps.Web/src/app/features/triage/models/evidence.model.ts b/src/Web/StellaOps.Web/src/app/features/triage/models/evidence.model.ts new file mode 100644 index 000000000..3e9574872 --- /dev/null +++ b/src/Web/StellaOps.Web/src/app/features/triage/models/evidence.model.ts @@ -0,0 +1,120 @@ +/** + * Evidence status for triage UI. + */ +export type EvidenceStatus = 'available' | 'loading' | 'unavailable' | 'error' | 'pending_enrichment'; + +/** + * Evidence bundle for an alert. + */ +export interface EvidenceBundle { + alertId: string; + reachability?: EvidenceSection; + callstack?: EvidenceSection; + provenance?: EvidenceSection; + vex?: VexEvidenceSection; + hashes?: EvidenceHashes; + computedAt: string; +} + +/** + * Individual evidence section. + */ +export interface EvidenceSection { + status: EvidenceStatus; + hash?: string; + proof?: unknown; + unavailableReason?: string; +} + +/** + * VEX evidence section with history. + */ +export interface VexEvidenceSection { + status: EvidenceStatus; + current?: VexStatement; + history?: VexStatement[]; +} + +/** + * VEX statement summary. 
+ */ +export interface VexStatement { + statementId: string; + status: string; + justification?: string; + impactStatement?: string; + timestamp: string; + issuer?: string; +} + +/** + * Evidence hashes for verification. + */ +export interface EvidenceHashes { + combinedHash?: string; + hashes?: string[]; +} + +/** + * Evidence bitset for completeness tracking. + */ +export class EvidenceBitset { + private static readonly REACHABILITY = 1 << 0; + private static readonly CALLSTACK = 1 << 1; + private static readonly PROVENANCE = 1 << 2; + private static readonly VEX = 1 << 3; + + constructor(public value: number = 0) {} + + get hasReachability(): boolean { + return (this.value & EvidenceBitset.REACHABILITY) !== 0; + } + + get hasCallstack(): boolean { + return (this.value & EvidenceBitset.CALLSTACK) !== 0; + } + + get hasProvenance(): boolean { + return (this.value & EvidenceBitset.PROVENANCE) !== 0; + } + + get hasVex(): boolean { + return (this.value & EvidenceBitset.VEX) !== 0; + } + + /** + * Completeness score (0-4). 
+ */ + get completenessScore(): number { + let score = 0; + if (this.hasReachability) score++; + if (this.hasCallstack) score++; + if (this.hasProvenance) score++; + if (this.hasVex) score++; + return score; + } + + static from(evidence: { + reachability?: boolean; + callstack?: boolean; + provenance?: boolean; + vex?: boolean; + }): EvidenceBitset { + let value = 0; + if (evidence.reachability) value |= EvidenceBitset.REACHABILITY; + if (evidence.callstack) value |= EvidenceBitset.CALLSTACK; + if (evidence.provenance) value |= EvidenceBitset.PROVENANCE; + if (evidence.vex) value |= EvidenceBitset.VEX; + return new EvidenceBitset(value); + } + + static fromBundle(bundle?: EvidenceBundle): EvidenceBitset { + if (!bundle) return new EvidenceBitset(0); + return EvidenceBitset.from({ + reachability: bundle.reachability?.status === 'available', + callstack: bundle.callstack?.status === 'available', + provenance: bundle.provenance?.status === 'available', + vex: bundle.vex?.status === 'available', + }); + } +} diff --git a/src/Web/StellaOps.Web/src/app/features/triage/services/ttfs-telemetry.service.ts b/src/Web/StellaOps.Web/src/app/features/triage/services/ttfs-telemetry.service.ts new file mode 100644 index 000000000..073cbe25f --- /dev/null +++ b/src/Web/StellaOps.Web/src/app/features/triage/services/ttfs-telemetry.service.ts @@ -0,0 +1,273 @@ +import { Injectable, inject } from '@angular/core'; +import { HttpClient } from '@angular/common/http'; +import { EvidenceBitset } from '../models/evidence.model'; + +/** + * TTFS timing data for an alert. + */ +export interface TtfsTimings { + alertId: string; + alertCreatedAt: number; + ttfsStartAt: number; + skeletonRenderedAt?: number; + firstEvidenceAt?: number; + fullEvidenceAt?: number; + decisionRecordedAt?: number; + clickCount: number; + evidenceBitset: number; +} + +/** + * TTFS event for backend ingestion. 
+ */ +interface TtfsEvent { + event_type: string; + alert_id: string; + duration_ms: number; + evidence_type?: string; + completeness_score?: number; + click_count?: number; + decision_status?: string; + phase?: string; + budget?: number; + evidence_bitset?: number; + timestamp: string; +} + +/** + * Performance budgets in milliseconds. + */ +const BUDGETS = { + skeleton: 200, + firstEvidence: 500, + fullEvidence: 1500, + clicksToClosure: 6, +} as const; + +/** + * Service for tracking Time-to-First-Signal (TTFS) telemetry. + * Measures time from alert creation to first evidence render. + */ +@Injectable({ providedIn: 'root' }) +export class TtfsTelemetryService { + private readonly http = inject(HttpClient); + private readonly activeTimings = new Map(); + private readonly pendingEvents: TtfsEvent[] = []; + private flushTimeout: ReturnType | null = null; + + /** + * Starts TTFS tracking for an alert. + */ + startTracking(alertId: string, alertCreatedAt: Date): void { + const timing: TtfsTimings = { + alertId, + alertCreatedAt: alertCreatedAt.getTime(), + ttfsStartAt: performance.now(), + clickCount: 0, + evidenceBitset: 0, + }; + + this.activeTimings.set(alertId, timing); + + this.queueEvent({ + event_type: 'ttfs.start', + alert_id: alertId, + duration_ms: 0, + timestamp: new Date().toISOString(), + }); + } + + /** + * Records skeleton UI render. + */ + recordSkeletonRender(alertId: string): void { + const timing = this.activeTimings.get(alertId); + if (!timing) return; + + timing.skeletonRenderedAt = performance.now(); + const duration = timing.skeletonRenderedAt - timing.ttfsStartAt; + + this.queueEvent({ + event_type: 'ttfs.skeleton', + alert_id: alertId, + duration_ms: duration, + timestamp: new Date().toISOString(), + }); + + // Check against budget + if (duration > BUDGETS.skeleton) { + this.recordBudgetViolation(alertId, 'skeleton', duration, BUDGETS.skeleton); + } + } + + /** + * Records first evidence pill paint (primary TTFS metric). 
+ */ + recordFirstEvidence(alertId: string, evidenceType: string): void { + const timing = this.activeTimings.get(alertId); + if (!timing || timing.firstEvidenceAt) return; + + timing.firstEvidenceAt = performance.now(); + const duration = timing.firstEvidenceAt - timing.ttfsStartAt; + + this.queueEvent({ + event_type: 'ttfs.first_evidence', + alert_id: alertId, + duration_ms: duration, + evidence_type: evidenceType, + timestamp: new Date().toISOString(), + }); + + // Check against budget + if (duration > BUDGETS.firstEvidence) { + this.recordBudgetViolation(alertId, 'first_evidence', duration, BUDGETS.firstEvidence); + } + } + + /** + * Records full evidence load complete. + */ + recordFullEvidence(alertId: string, bitset: EvidenceBitset): void { + const timing = this.activeTimings.get(alertId); + if (!timing) return; + + timing.fullEvidenceAt = performance.now(); + timing.evidenceBitset = bitset.value; + + const duration = timing.fullEvidenceAt - timing.ttfsStartAt; + + this.queueEvent({ + event_type: 'ttfs.full_evidence', + alert_id: alertId, + duration_ms: duration, + completeness_score: bitset.completenessScore, + evidence_bitset: bitset.value, + timestamp: new Date().toISOString(), + }); + + // Check against budget + if (duration > BUDGETS.fullEvidence) { + this.recordBudgetViolation(alertId, 'full_evidence', duration, BUDGETS.fullEvidence); + } + } + + /** + * Records a user interaction (click, keyboard). + */ + recordInteraction(alertId: string, interactionType: string): void { + const timing = this.activeTimings.get(alertId); + if (!timing) return; + + timing.clickCount++; + + this.queueEvent({ + event_type: 'triage.interaction', + alert_id: alertId, + duration_ms: performance.now() - timing.ttfsStartAt, + evidence_type: interactionType, + click_count: timing.clickCount, + timestamp: new Date().toISOString(), + }); + } + + /** + * Records decision completion and final metrics. 
+ */ + recordDecision(alertId: string, decisionStatus: string): void { + const timing = this.activeTimings.get(alertId); + if (!timing) return; + + timing.decisionRecordedAt = performance.now(); + const totalDuration = timing.decisionRecordedAt - timing.ttfsStartAt; + + this.queueEvent({ + event_type: 'decision.recorded', + alert_id: alertId, + duration_ms: totalDuration, + click_count: timing.clickCount, + decision_status: decisionStatus, + evidence_bitset: timing.evidenceBitset, + completeness_score: new EvidenceBitset(timing.evidenceBitset).completenessScore, + timestamp: new Date().toISOString(), + }); + + // Check clicks-to-closure budget + if (timing.clickCount > BUDGETS.clicksToClosure) { + this.recordBudgetViolation(alertId, 'clicks_to_closure', timing.clickCount, BUDGETS.clicksToClosure); + } + + // Cleanup + this.activeTimings.delete(alertId); + + // Flush events after decision + this.flushEvents(); + } + + /** + * Cancels tracking for an alert (e.g., user navigates away). + */ + cancelTracking(alertId: string): void { + this.activeTimings.delete(alertId); + } + + /** + * Gets current timing data for an alert. + */ + getTimings(alertId: string): TtfsTimings | undefined { + return this.activeTimings.get(alertId); + } + + /** + * Gets current click count for an alert. + */ + getClickCount(alertId: string): number { + return this.activeTimings.get(alertId)?.clickCount ?? 
0; + } + + private recordBudgetViolation(alertId: string, phase: string, actual: number, budget: number): void { + this.queueEvent({ + event_type: 'budget.violation', + alert_id: alertId, + duration_ms: actual, + phase, + budget, + timestamp: new Date().toISOString(), + }); + } + + private queueEvent(event: TtfsEvent): void { + this.pendingEvents.push(event); + + // Schedule flush if not already scheduled + if (!this.flushTimeout) { + this.flushTimeout = setTimeout(() => this.flushEvents(), 5000); + } + + // Flush immediately if we have too many events + if (this.pendingEvents.length >= 20) { + this.flushEvents(); + } + } + + private flushEvents(): void { + if (this.flushTimeout) { + clearTimeout(this.flushTimeout); + this.flushTimeout = null; + } + + if (this.pendingEvents.length === 0) return; + + const events = [...this.pendingEvents]; + this.pendingEvents.length = 0; + + // Send to backend + this.http + .post('/api/v1/telemetry/ttfs', { events }) + .subscribe({ + error: (err) => { + // Log but don't fail - telemetry should be non-blocking + console.warn('Failed to send TTFS telemetry:', err); + }, + }); + } +} diff --git a/src/Web/StellaOps.Web/src/app/features/triage/triage-workspace.component.html b/src/Web/StellaOps.Web/src/app/features/triage/triage-workspace.component.html index ac9f88930..2cb579260 100644 --- a/src/Web/StellaOps.Web/src/app/features/triage/triage-workspace.component.html +++ b/src/Web/StellaOps.Web/src/app/features/triage/triage-workspace.component.html @@ -100,7 +100,28 @@
+ + @if (selectedVuln()) { + + } +
+ +
+ + +
+
+

Reachability

+ + {{ selectedEvidenceBundle()?.reachability?.status ?? 'unknown' }} + +
+
+ @if (selectedVuln()!.vuln.reachabilityStatus === 'reachable') { +

Vulnerable code is reachable from application entry points.

+

Score: {{ selectedVuln()!.vuln.reachabilityScore ?? 0 }}

+ + } @else if (selectedVuln()!.vuln.reachabilityStatus === 'unreachable') { +

Vulnerable code is not reachable from application entry points.

+ } @else { +

Reachability analysis pending or unavailable.

+ } +
+
+ + +
+
+

Call Stack

+ + {{ selectedEvidenceBundle()?.callstack?.status ?? 'unknown' }} + +
+
+ @if (selectedEvidenceBundle()?.callstack?.status === 'available') { +

Call stack evidence available from reachability analysis.

+ } @else { +

Call stack evidence unavailable.

+ } +
+
+ + +
+
+

Provenance

+ + {{ selectedEvidenceBundle()?.provenance?.status ?? 'unknown' }} + +
+
+ @if (hasSignedEvidence(selectedVuln()!)) { +

Signed attestation available for this artifact.

+ + } @else { +

Awaiting signed provenance attestation.

+ } +
+
+ + +
+
+

VEX Decision

+ + {{ selectedEvidenceBundle()?.vex?.status ?? 'unknown' }} + +
+
+ @if (getVexBadgeForFinding(selectedVuln()!); as vexBadge) { +

{{ vexBadge }}

+ + } @else { +

No VEX decision recorded.

+ + } +
+
+
} @else if (activeTab() === 'overview') {
} + + + @if (showVexModal()) { (VULNERABILITY_API); private readonly vexApi = inject(VEX_DECISIONS_API); private readonly shortcuts = inject(TriageShortcutsService); + private readonly ttfsTelemetry = inject(TtfsTelemetryService); @ViewChild('reachabilitySearchInput') private readonly reachabilitySearchInput?: ElementRef; @@ -99,7 +106,11 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { readonly selectedVulnId = signal(null); readonly selectedForBulk = signal([]); - readonly activeTab = signal('overview'); + readonly activeTab = signal('evidence'); + + // Decision drawer state + readonly showDecisionDrawer = signal(false); + readonly currentEvidence = signal(undefined); readonly showVexModal = signal(false); readonly vexTargetVulnerabilityIds = signal([]); @@ -125,6 +136,50 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { return id ? this.findings().find((f) => f.vuln.vulnId === id) ?? null : null; }); + readonly selectedAlertSummary = computed(() => { + const selected = this.selectedVuln(); + if (!selected) return undefined; + return { + id: selected.vuln.vulnId, + artifactId: this.artifactId(), + vulnId: selected.vuln.cveId, + severity: selected.vuln.severity, + }; + }); + + readonly selectedEvidenceBundle = computed(() => { + const selected = this.selectedVuln(); + if (!selected) return undefined; + + // Build mock evidence bundle based on vulnerability data + const vuln = selected.vuln; + return { + alertId: vuln.vulnId, + reachability: { + status: vuln.reachabilityStatus === 'reachable' ? 'available' + : vuln.reachabilityStatus === 'unreachable' ? 'available' + : vuln.reachabilityStatus === 'unknown' ? 'loading' + : 'unavailable', + hash: `reach-${vuln.vulnId}`, + }, + callstack: { + status: vuln.reachabilityStatus === 'reachable' ? 'available' : 'unavailable', + }, + provenance: { + status: this.hasSignedEvidence(selected) ? 
'available' : 'pending_enrichment', + }, + vex: { + status: this.getVexBadgeForFinding(selected) ? 'available' : 'unavailable', + current: this.getVexBadgeForFinding(selected) ? { + statementId: `vex-${vuln.vulnId}`, + status: this.getVexBadgeForFinding(selected) ?? 'unknown', + timestamp: new Date().toISOString(), + } : undefined, + }, + computedAt: new Date().toISOString(), + }; + }); + readonly findings = computed(() => { const id = this.artifactId(); if (!id) return []; @@ -302,9 +357,26 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { } selectFinding(vulnId: string, options?: { resetTab?: boolean }): void { + const previousId = this.selectedVulnId(); + + // If changing selection, start new TTFS tracking + if (previousId !== vulnId) { + // Cancel tracking on previous alert if any + if (previousId) { + this.ttfsTelemetry.cancelTracking(previousId); + } + + // Start tracking for new alert + const finding = this.findings().find((f) => f.vuln.vulnId === vulnId); + if (finding) { + const alertCreatedAt = finding.vuln.publishedAt ? new Date(finding.vuln.publishedAt) : new Date(); + this.ttfsTelemetry.startTracking(vulnId, alertCreatedAt); + } + } + this.selectedVulnId.set(vulnId); if (options?.resetTab ?? 
true) { - this.activeTab.set('overview'); + this.activeTab.set('evidence'); } } @@ -353,6 +425,69 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { this.vexExistingDecision.set(null); } + // Decision drawer methods + openDecisionDrawer(): void { + const vulnId = this.selectedVulnId(); + if (vulnId) { + this.ttfsTelemetry.recordInteraction(vulnId, 'open_drawer'); + } + this.showDecisionDrawer.set(true); + } + + closeDecisionDrawer(): void { + this.showDecisionDrawer.set(false); + } + + onDecisionDrawerSubmit(decision: DecisionFormData): void { + const selected = this.selectedVuln(); + if (!selected) return; + + const vulnId = selected.vuln.vulnId; + + // Record TTFS decision event + this.ttfsTelemetry.recordDecision(vulnId, decision.status); + + // Convert to VEX and submit + const vexStatus = this.mapDecisionStatusToVex(decision.status); + this.vexTargetVulnerabilityIds.set([selected.vuln.cveId]); + this.vexModalInitialStatus.set(vexStatus); + this.showVexModal.set(true); + + this.closeDecisionDrawer(); + } + + private mapDecisionStatusToVex(status: DecisionFormData['status']): VexStatus { + switch (status) { + case 'affected': + return 'AFFECTED_UNMITIGATED'; + case 'not_affected': + return 'NOT_AFFECTED'; + case 'under_investigation': + default: + return 'UNDER_INVESTIGATION'; + } + } + + onEvidencePillClick(evidenceType: 'reachability' | 'callstack' | 'provenance' | 'vex'): void { + const vulnId = this.selectedVulnId(); + if (vulnId) { + this.ttfsTelemetry.recordInteraction(vulnId, `pill_click_${evidenceType}`); + } + + // Navigate to relevant evidence section + if (evidenceType === 'reachability') { + this.activeTab.set('reachability'); + } else if (evidenceType === 'vex') { + this.openDecisionDrawer(); + } + } + + // Get evidence hash for audit trail + getEvidenceHash(): string { + const evidence = this.currentEvidence(); + return evidence?.hashes?.combinedHash ?? 
''; + } + onVexSaved(decisions: readonly VexDecision[]): void { const updated = [...this.vexDecisions(), ...decisions].sort((a, b) => { const aWhen = a.updatedAt ?? a.createdAt; @@ -467,7 +602,7 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { } private isShortcutOverlayOpen(): boolean { - return this.showVexModal() || this.showKeyboardHelp() || this.showReachabilityDrawer() || this.attestationModal() !== null; + return this.showVexModal() || this.showKeyboardHelp() || this.showReachabilityDrawer() || this.attestationModal() !== null || this.showDecisionDrawer(); } private jumpToIncompleteEvidencePane(): void { @@ -576,6 +711,7 @@ export class TriageWorkspaceComponent implements OnInit, OnDestroy { if (this.showKeyboardHelp()) this.showKeyboardHelp.set(false); if (this.showReachabilityDrawer()) this.closeReachabilityDrawer(); if (this.attestationModal()) this.attestationModal.set(null); + if (this.showDecisionDrawer()) this.closeDecisionDrawer(); } private toggleKeyboardHelp(): void { diff --git a/src/Zastava/__Libraries/StellaOps.Zastava.Core/Validation/SurfaceCacheValidator.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Validation/SurfaceCacheValidator.cs index 913bb70c2..94ee76f59 100644 --- a/src/Zastava/__Libraries/StellaOps.Zastava.Core/Validation/SurfaceCacheValidator.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Validation/SurfaceCacheValidator.cs @@ -94,8 +94,8 @@ public sealed class SurfaceCacheValidator : IHostedService if (staleCount > 0) { - var oldestEntry = entries.OrderBy(e => e.LastModified).FirstOrDefault(); - var oldestAge = oldestEntry is not null ? 
now - oldestEntry.LastModified : TimeSpan.Zero; + var oldestEntry = entries.OrderBy(e => e.LastModified).First(); + var oldestAge = now - oldestEntry.LastModified; if (freshCount == 0) { diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/GlobalUsings.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/GlobalUsings.cs new file mode 100644 index 000000000..c802f4480 --- /dev/null +++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/GlobalUsings.cs @@ -0,0 +1 @@ +global using Xunit; diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/ImportValidatorTests.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/ImportValidatorTests.cs index 6ab2644a1..5a9716f66 100644 --- a/tests/AirGap/StellaOps.AirGap.Importer.Tests/ImportValidatorTests.cs +++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/ImportValidatorTests.cs @@ -1,22 +1,61 @@ using System.Security.Cryptography; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; using StellaOps.AirGap.Importer.Contracts; +using StellaOps.AirGap.Importer.Quarantine; using StellaOps.AirGap.Importer.Validation; +using StellaOps.AirGap.Importer.Versioning; namespace StellaOps.AirGap.Importer.Tests; -public class ImportValidatorTests +public sealed class ImportValidatorTests { [Fact] - public void FailsWhenTufInvalid() + public async Task ValidateAsync_WhenTufInvalid_ShouldFailAndQuarantine() { - var request = BuildRequest(rootJson: "{}", snapshotJson: "{}", timestampJson: "{}"); - var result = new ImportValidator().Validate(request); - Assert.False(result.IsValid); - Assert.StartsWith("tuf:", result.Reason); + var quarantine = new CapturingQuarantineService(); + var monotonicity = new CapturingMonotonicityChecker(); + + var validator = new ImportValidator( + new DsseVerifier(), + new TufMetadataValidator(), + new MerkleRootCalculator(), + new RootRotationPolicy(), + monotonicity, + quarantine, + NullLogger.Instance); + + var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", 
Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(tempRoot); + var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst"); + await File.WriteAllTextAsync(bundlePath, "bundle-bytes"); + + try + { + var request = BuildRequest(bundlePath, rootJson: "{}", snapshotJson: "{}", timestampJson: "{}"); + var result = await validator.ValidateAsync(request); + + result.IsValid.Should().BeFalse(); + result.Reason.Should().StartWith("tuf:"); + + quarantine.Requests.Should().HaveCount(1); + quarantine.Requests[0].TenantId.Should().Be("tenant-a"); + } + finally + { + try + { + Directory.Delete(tempRoot, recursive: true); + } + catch + { + // best-effort cleanup + } + } } [Fact] - public void SucceedsWhenAllChecksPass() + public async Task ValidateAsync_WhenAllChecksPass_ShouldSucceedAndRecordActivation() { var root = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\"}"; var snapshot = "{\"version\":1,\"expiresUtc\":\"2030-01-01T00:00:00Z\",\"meta\":{\"snapshot\":{\"hashes\":{\"sha256\":\"abc\"}}}}"; @@ -39,20 +78,66 @@ public class ImportValidatorTests trustStore.LoadActive(new Dictionary { ["k1"] = pub }); trustStore.StagePending(new Dictionary { ["k2"] = pub }); - var request = new ImportValidationRequest( - envelope, - new TrustRootConfig("/tmp/root.json", new[] { Fingerprint(pub) }, new[] { "rsassa-pss-sha256" }, null, null, new Dictionary { ["k1"] = pub }), - root, - snapshot, - timestamp, - new List { new("a.txt", new MemoryStream("data"u8.ToArray())) }, - trustStore, - new[] { "approver-1", "approver-2" }); + var quarantine = new CapturingQuarantineService(); + var monotonicity = new CapturingMonotonicityChecker(); - var result = new ImportValidator().Validate(request); + var validator = new ImportValidator( + new DsseVerifier(), + new TufMetadataValidator(), + new MerkleRootCalculator(), + new RootRotationPolicy(), + monotonicity, + quarantine, + NullLogger.Instance); - Assert.True(result.IsValid); - Assert.Equal("import-validated", result.Reason); + var 
tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(tempRoot); + var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst"); + await File.WriteAllTextAsync(bundlePath, "bundle-bytes"); + + try + { + var request = new ImportValidationRequest( + TenantId: "tenant-a", + BundleType: "offline-kit", + BundleDigest: "sha256:bundle", + BundlePath: bundlePath, + ManifestJson: "{\"version\":\"1.0.0\"}", + ManifestVersion: "1.0.0", + ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"), + ForceActivate: false, + ForceActivateReason: null, + Envelope: envelope, + TrustRoots: new TrustRootConfig("/tmp/root.json", new[] { Fingerprint(pub) }, new[] { "rsassa-pss-sha256" }, null, null, new Dictionary { ["k1"] = pub }), + RootJson: root, + SnapshotJson: snapshot, + TimestampJson: timestamp, + PayloadEntries: new List { new("a.txt", new MemoryStream("data"u8.ToArray())) }, + TrustStore: trustStore, + ApproverIds: new[] { "approver-1", "approver-2" }); + + var result = await validator.ValidateAsync(request); + + result.IsValid.Should().BeTrue(); + result.Reason.Should().Be("import-validated"); + + monotonicity.RecordedActivations.Should().HaveCount(1); + monotonicity.RecordedActivations[0].BundleDigest.Should().Be("sha256:bundle"); + monotonicity.RecordedActivations[0].Version.SemVer.Should().Be("1.0.0"); + + quarantine.Requests.Should().BeEmpty(); + } + finally + { + try + { + Directory.Delete(tempRoot, recursive: true); + } + catch + { + // best-effort cleanup + } + } } private static byte[] BuildPae(string payloadType, string payload) @@ -74,19 +159,80 @@ public class ImportValidatorTests private static string Fingerprint(byte[] pub) => Convert.ToHexString(SHA256.HashData(pub)).ToLowerInvariant(); - private static ImportValidationRequest BuildRequest(string rootJson, string snapshotJson, string timestampJson) + private static ImportValidationRequest BuildRequest(string bundlePath, string 
rootJson, string snapshotJson, string timestampJson) { var envelope = new DsseEnvelope("text/plain", Convert.ToBase64String("hi"u8), Array.Empty()); var trustRoot = TrustRootConfig.Empty("/tmp"); var trustStore = new TrustStore(); return new ImportValidationRequest( - envelope, - trustRoot, - rootJson, - snapshotJson, - timestampJson, - Array.Empty(), - trustStore, - Array.Empty()); + TenantId: "tenant-a", + BundleType: "offline-kit", + BundleDigest: "sha256:bundle", + BundlePath: bundlePath, + ManifestJson: null, + ManifestVersion: "1.0.0", + ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"), + ForceActivate: false, + ForceActivateReason: null, + Envelope: envelope, + TrustRoots: trustRoot, + RootJson: rootJson, + SnapshotJson: snapshotJson, + TimestampJson: timestampJson, + PayloadEntries: Array.Empty(), + TrustStore: trustStore, + ApproverIds: Array.Empty()); + } + + private sealed class CapturingMonotonicityChecker : IVersionMonotonicityChecker + { + public List<(BundleVersion Version, string BundleDigest)> RecordedActivations { get; } = new(); + + public Task CheckAsync(string tenantId, string bundleType, BundleVersion incomingVersion, CancellationToken cancellationToken = default) + { + return Task.FromResult(new MonotonicityCheckResult( + IsMonotonic: true, + CurrentVersion: null, + CurrentBundleDigest: null, + CurrentActivatedAt: null, + ReasonCode: "FIRST_ACTIVATION")); + } + + public Task RecordActivationAsync( + string tenantId, + string bundleType, + BundleVersion version, + string bundleDigest, + bool wasForceActivated = false, + string? 
forceActivateReason = null, + CancellationToken cancellationToken = default) + { + RecordedActivations.Add((version, bundleDigest)); + return Task.CompletedTask; + } + } + + private sealed class CapturingQuarantineService : IQuarantineService + { + public List Requests { get; } = new(); + + public Task QuarantineAsync(QuarantineRequest request, CancellationToken cancellationToken = default) + { + Requests.Add(request); + return Task.FromResult(new QuarantineResult( + Success: true, + QuarantineId: "test", + QuarantinePath: "(memory)", + QuarantinedAt: DateTimeOffset.UnixEpoch)); + } + + public Task> ListAsync(string tenantId, QuarantineListOptions? options = null, CancellationToken cancellationToken = default) => + Task.FromResult>(Array.Empty()); + + public Task RemoveAsync(string tenantId, string quarantineId, string removalReason, CancellationToken cancellationToken = default) => + Task.FromResult(false); + + public Task CleanupExpiredAsync(TimeSpan retentionPeriod, CancellationToken cancellationToken = default) => + Task.FromResult(0); } } diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/Quarantine/FileSystemQuarantineServiceTests.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Quarantine/FileSystemQuarantineServiceTests.cs new file mode 100644 index 000000000..dc5e45d04 --- /dev/null +++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Quarantine/FileSystemQuarantineServiceTests.cs @@ -0,0 +1,155 @@ +using System.Text.Json; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.AirGap.Importer.Quarantine; + +namespace StellaOps.AirGap.Importer.Tests.Quarantine; + +public sealed class FileSystemQuarantineServiceTests +{ + [Fact] + public async Task QuarantineAsync_ShouldCreateExpectedFiles_AndListAsyncShouldReturnEntry() + { + var root = CreateTempDirectory(); + try + { + var bundlePath = Path.Combine(root, "bundle.tar.zst"); + await File.WriteAllTextAsync(bundlePath, 
"bundle-bytes"); + + var options = Options.Create(new QuarantineOptions + { + QuarantineRoot = Path.Combine(root, "quarantine"), + RetentionPeriod = TimeSpan.FromDays(30), + MaxQuarantineSizeBytes = 1024 * 1024, + EnableAutomaticCleanup = true + }); + + var svc = new FileSystemQuarantineService( + options, + NullLogger.Instance, + TimeProvider.System); + + var result = await svc.QuarantineAsync(new QuarantineRequest( + TenantId: "tenant-a", + BundlePath: bundlePath, + ManifestJson: "{\"version\":\"1.0.0\"}", + ReasonCode: "dsse:invalid", + ReasonMessage: "dsse:invalid", + VerificationLog: new[] { "tuf:ok", "dsse:invalid" }, + Metadata: new Dictionary { ["k"] = "v" })); + + result.Success.Should().BeTrue(); + Directory.Exists(result.QuarantinePath).Should().BeTrue(); + + File.Exists(Path.Combine(result.QuarantinePath, "bundle.tar.zst")).Should().BeTrue(); + File.Exists(Path.Combine(result.QuarantinePath, "manifest.json")).Should().BeTrue(); + File.Exists(Path.Combine(result.QuarantinePath, "verification.log")).Should().BeTrue(); + File.Exists(Path.Combine(result.QuarantinePath, "failure-reason.txt")).Should().BeTrue(); + File.Exists(Path.Combine(result.QuarantinePath, "quarantine.json")).Should().BeTrue(); + + var listed = await svc.ListAsync("tenant-a"); + listed.Should().ContainSingle(e => e.QuarantineId == result.QuarantineId); + } + finally + { + SafeDeleteDirectory(root); + } + } + + [Fact] + public async Task RemoveAsync_ShouldMoveToRemovedFolder() + { + var root = CreateTempDirectory(); + try + { + var bundlePath = Path.Combine(root, "bundle.tar.zst"); + await File.WriteAllTextAsync(bundlePath, "bundle-bytes"); + + var quarantineRoot = Path.Combine(root, "quarantine"); + var options = Options.Create(new QuarantineOptions { QuarantineRoot = quarantineRoot, MaxQuarantineSizeBytes = 1024 * 1024 }); + var svc = new FileSystemQuarantineService(options, NullLogger.Instance, TimeProvider.System); + + var result = await svc.QuarantineAsync(new QuarantineRequest( + 
TenantId: "tenant-a", + BundlePath: bundlePath, + ManifestJson: null, + ReasonCode: "tuf:invalid", + ReasonMessage: "tuf:invalid", + VerificationLog: new[] { "tuf:invalid" })); + + var removed = await svc.RemoveAsync("tenant-a", result.QuarantineId, "investigated"); + removed.Should().BeTrue(); + + Directory.Exists(result.QuarantinePath).Should().BeFalse(); + Directory.Exists(Path.Combine(quarantineRoot, "tenant-a", ".removed", result.QuarantineId)).Should().BeTrue(); + } + finally + { + SafeDeleteDirectory(root); + } + } + + [Fact] + public async Task CleanupExpiredAsync_ShouldDeleteOldEntries() + { + var root = CreateTempDirectory(); + try + { + var bundlePath = Path.Combine(root, "bundle.tar.zst"); + await File.WriteAllTextAsync(bundlePath, "bundle-bytes"); + + var quarantineRoot = Path.Combine(root, "quarantine"); + var options = Options.Create(new QuarantineOptions { QuarantineRoot = quarantineRoot, MaxQuarantineSizeBytes = 1024 * 1024 }); + var svc = new FileSystemQuarantineService(options, NullLogger.Instance, TimeProvider.System); + + var result = await svc.QuarantineAsync(new QuarantineRequest( + TenantId: "tenant-a", + BundlePath: bundlePath, + ManifestJson: null, + ReasonCode: "tuf:invalid", + ReasonMessage: "tuf:invalid", + VerificationLog: new[] { "tuf:invalid" })); + + var jsonPath = Path.Combine(result.QuarantinePath, "quarantine.json"); + var json = await File.ReadAllTextAsync(jsonPath); + var jsonOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }; + var entry = JsonSerializer.Deserialize(json, jsonOptions); + entry.Should().NotBeNull(); + + var oldEntry = entry! 
with { QuarantinedAt = DateTimeOffset.Parse("1900-01-01T00:00:00Z") };
            await File.WriteAllTextAsync(jsonPath, JsonSerializer.Serialize(oldEntry, jsonOptions));

            var removed = await svc.CleanupExpiredAsync(TimeSpan.FromDays(30));
            removed.Should().BeGreaterThanOrEqualTo(1);
            Directory.Exists(result.QuarantinePath).Should().BeFalse();
        }
        finally
        {
            SafeDeleteDirectory(root);
        }
    }

    /// <summary>Creates a unique temp directory for one test run.</summary>
    private static string CreateTempDirectory()
    {
        var dir = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(dir);
        return dir;
    }

    /// <summary>Best-effort recursive delete; cleanup failures must not fail the test.</summary>
    private static void SafeDeleteDirectory(string path)
    {
        try
        {
            if (Directory.Exists(path))
            {
                Directory.Delete(path, recursive: true);
            }
        }
        catch
        {
            // best-effort cleanup
        }
    }
}

diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj b/tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj
index 765d890a4..bc42f7312 100644
--- a/tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj
+++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/StellaOps.AirGap.Importer.Tests.csproj
@@ -6,6 +6,7 @@
     <Nullable>enable</Nullable>
+    <!-- NOTE(review): the added element's markup was lost in transcription (angle brackets stripped);
         per the new tests' usings it is presumably a PackageReference for Microsoft.Extensions.Options — confirm against bc42f7312 -->

diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/Validation/ImportValidatorIntegrationTests.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Validation/ImportValidatorIntegrationTests.cs
new file mode 100644
index 000000000..001cf7a96
--- /dev/null
+++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Validation/ImportValidatorIntegrationTests.cs
@@ -0,0 +1,204 @@
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Quarantine;
using StellaOps.AirGap.Importer.Validation;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Validation;

/// <summary>
/// End-to-end validator test: a bundle that fails the monotonicity check must be
/// rejected and handed to the quarantine service.
/// </summary>
public sealed class 
ImportValidatorIntegrationTests
{
    [Fact]
    public async Task ValidateAsync_WhenNonMonotonic_ShouldFailAndQuarantine()
    {
        var quarantine = new CapturingQuarantineService();
        var monotonicity = new FixedMonotonicityChecker(isMonotonic: false);

        var validator = new ImportValidator(
            new DsseVerifier(),
            new TufMetadataValidator(),
            new MerkleRootCalculator(),
            new RootRotationPolicy(),
            monotonicity,
            quarantine,
            NullLogger<ImportValidator>.Instance);

        var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(tempRoot);
        var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst");
        await File.WriteAllTextAsync(bundlePath, "bundle-bytes");

        try
        {
            // All other validation inputs are valid so the monotonicity step is the only failure.
            var (envelope, trustRoots) = CreateValidDsse();

            var trustStore = new TrustStore();
            trustStore.LoadActive(new Dictionary<string, byte[]>());
            trustStore.StagePending(new Dictionary<string, byte[]> { ["pending-key"] = new byte[] { 1, 2, 3 } });

            var request = new ImportValidationRequest(
                TenantId: "tenant-a",
                BundleType: "offline-kit",
                BundleDigest: "sha256:bundle",
                BundlePath: bundlePath,
                ManifestJson: "{\"version\":\"1.0.0\"}",
                ManifestVersion: "1.0.0",
                ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"),
                ForceActivate: false,
                ForceActivateReason: null,
                Envelope: envelope,
                TrustRoots: trustRoots,
                RootJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z"}
                """,
                SnapshotJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z","meta":{"snapshot":{"hashes":{"sha256":"abc"}}}}
                """,
                TimestampJson: """
                {"version":1,"expiresUtc":"2025-12-31T00:00:00Z","snapshot":{"meta":{"hashes":{"sha256":"abc"}}}}
                """,
                PayloadEntries: new[] { new NamedStream("payload.txt", new MemoryStream(Encoding.UTF8.GetBytes("hello"))) },
                TrustStore: trustStore,
                ApproverIds: new[] { "approver-a", "approver-b" });

            var result = await validator.ValidateAsync(request);

            result.IsValid.Should().BeFalse();
            result.Reason.Should().Contain("version-non-monotonic");

            // Failed validation must hand the bundle to quarantine with the same reason code.
            quarantine.Requests.Should().HaveCount(1);
            quarantine.Requests[0].TenantId.Should().Be("tenant-a");
            quarantine.Requests[0].ReasonCode.Should().Contain("version-non-monotonic");
        }
        finally
        {
            try
            {
                Directory.Delete(tempRoot, recursive: true);
            }
            catch
            {
                // best-effort cleanup
            }
        }
    }

    /// <summary>
    /// Builds a DSSE envelope signed with a freshly generated RSA-PSS key and a trust-root
    /// config whose fingerprint/public-key entries accept that signature.
    /// </summary>
    private static (DsseEnvelope envelope, TrustRootConfig trustRoots) CreateValidDsse()
    {
        using var rsa = RSA.Create(2048);
        var publicKey = rsa.ExportSubjectPublicKeyInfo();

        var fingerprint = Convert.ToHexString(SHA256.HashData(publicKey)).ToLowerInvariant();
        var payloadType = "application/vnd.in-toto+json";
        var payloadBytes = Encoding.UTF8.GetBytes("{\"hello\":\"world\"}");
        var payloadBase64 = Convert.ToBase64String(payloadBytes);

        var pae = BuildPae(payloadType, payloadBytes);
        var signature = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);

        var envelope = new DsseEnvelope(
            PayloadType: payloadType,
            Payload: payloadBase64,
            Signatures: new[] { new DsseSignature("key-1", Convert.ToBase64String(signature)) });

        var trustRoots = new TrustRootConfig(
            RootBundlePath: "(memory)",
            TrustedKeyFingerprints: new[] { fingerprint },
            AllowedSignatureAlgorithms: new[] { "rsa-pss-sha256" },
            NotBeforeUtc: null,
            NotAfterUtc: null,
            PublicKeys: new Dictionary<string, byte[]> { ["key-1"] = publicKey });

        return (envelope, trustRoots);
    }

    /// <summary>
    /// Pre-authentication encoding over (type, payload).
    /// NOTE(review): this "PAE:&lt;count&gt; ..." layout with char counts is not the standard DSSE PAE
    /// ("DSSEv1 " + byte lengths); it presumably mirrors the project's DsseVerifier — confirm.
    /// </summary>
    private static byte[] BuildPae(string payloadType, byte[] payloadBytes)
    {
        const string paePrefix = "DSSEv1";
        var payload = Encoding.UTF8.GetString(payloadBytes);

        var parts = new[]
        {
            paePrefix,
            payloadType,
            payload
        };

        var paeBuilder = new StringBuilder();
        paeBuilder.Append("PAE:");
        paeBuilder.Append(parts.Length);
        foreach (var part in parts)
        {
            paeBuilder.Append(' ');
            paeBuilder.Append(part.Length);
            paeBuilder.Append(' ');
            paeBuilder.Append(part);
        }

        return Encoding.UTF8.GetBytes(paeBuilder.ToString());
    }

    /// <summary>Checker stub that always answers with the configured monotonicity verdict.</summary>
    private sealed class FixedMonotonicityChecker : IVersionMonotonicityChecker
    {
        private readonly bool _isMonotonic;

        public FixedMonotonicityChecker(bool isMonotonic)
        {
            _isMonotonic = isMonotonic;
        }

        public Task<MonotonicityCheckResult> CheckAsync(
            string tenantId,
            string bundleType,
            BundleVersion incomingVersion,
            CancellationToken cancellationToken = default)
        {
            return Task.FromResult(new MonotonicityCheckResult(
                IsMonotonic: _isMonotonic,
                CurrentVersion: new BundleVersion(2, 0, 0, DateTimeOffset.Parse("2025-12-14T00:00:00Z")),
                CurrentBundleDigest: "sha256:current",
                CurrentActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
                ReasonCode: _isMonotonic ? "MONOTONIC_OK" : "VERSION_NON_MONOTONIC"));
        }

        public Task RecordActivationAsync(
            string tenantId,
            string bundleType,
            BundleVersion version,
            string bundleDigest,
            bool wasForceActivated = false,
            string? forceActivateReason = null,
            CancellationToken cancellationToken = default)
        {
            return Task.CompletedTask;
        }
    }

    /// <summary>Test double that captures quarantine requests in memory.</summary>
    private sealed class CapturingQuarantineService : IQuarantineService
    {
        public List<QuarantineRequest> Requests { get; } = new();

        public Task<QuarantineResult> QuarantineAsync(QuarantineRequest request, CancellationToken cancellationToken = default)
        {
            Requests.Add(request);
            return Task.FromResult(new QuarantineResult(
                Success: true,
                QuarantineId: "test",
                QuarantinePath: "(memory)",
                QuarantinedAt: DateTimeOffset.UnixEpoch));
        }

        public Task<IReadOnlyList<QuarantineEntry>> ListAsync(string tenantId, QuarantineListOptions? 
options = null, CancellationToken cancellationToken = default) =>
            Task.FromResult<IReadOnlyList<QuarantineEntry>>(Array.Empty<QuarantineEntry>());

        public Task<bool> RemoveAsync(string tenantId, string quarantineId, string removalReason, CancellationToken cancellationToken = default) =>
            Task.FromResult(false);

        public Task<int> CleanupExpiredAsync(TimeSpan retentionPeriod, CancellationToken cancellationToken = default) =>
            Task.FromResult(0);
    }
}

diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/BundleVersionTests.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/BundleVersionTests.cs
new file mode 100644
index 000000000..b3225df51
--- /dev/null
+++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/BundleVersionTests.cs
@@ -0,0 +1,79 @@
using FluentAssertions;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Versioning;

/// <summary>Unit tests for <see cref="BundleVersion"/> parsing and ordering semantics.</summary>
public sealed class BundleVersionTests
{
    [Fact]
    public void Parse_ShouldParseSemVer()
    {
        var createdAt = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var version = BundleVersion.Parse("1.2.3", createdAt);

        version.Major.Should().Be(1);
        version.Minor.Should().Be(2);
        version.Patch.Should().Be(3);
        version.Prerelease.Should().BeNull();
        version.CreatedAt.Should().Be(createdAt);
        version.SemVer.Should().Be("1.2.3");
    }

    [Fact]
    public void Parse_ShouldParsePrerelease()
    {
        var createdAt = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var version = BundleVersion.Parse("1.2.3-edge.1", createdAt);

        version.SemVer.Should().Be("1.2.3-edge.1");
        version.Prerelease.Should().Be("edge.1");
    }

    [Fact]
    public void IsNewerThan_ShouldCompareMajorMinorPatch()
    {
        var a = new BundleVersion(1, 2, 3, DateTimeOffset.UnixEpoch);
        var b = new BundleVersion(2, 0, 0, DateTimeOffset.UnixEpoch);
        b.IsNewerThan(a).Should().BeTrue();
        a.IsNewerThan(b).Should().BeFalse();
    }

    [Fact]
    public void IsNewerThan_ShouldTreatReleaseAsNewerThanPrerelease()
    {
        var now = new 
DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var prerelease = new BundleVersion(1, 2, 3, now, "alpha");
        var release = new BundleVersion(1, 2, 3, now, null);

        // SemVer rule: a release outranks any prerelease of the same MAJOR.MINOR.PATCH.
        release.IsNewerThan(prerelease).Should().BeTrue();
        prerelease.IsNewerThan(release).Should().BeFalse();
    }

    [Fact]
    public void IsNewerThan_ShouldOrderPrereleaseIdentifiers()
    {
        var now = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var alpha = new BundleVersion(1, 2, 3, now, "alpha");
        var beta = new BundleVersion(1, 2, 3, now, "beta");
        var rc1 = new BundleVersion(1, 2, 3, now, "rc.1");
        var rc2 = new BundleVersion(1, 2, 3, now, "rc.2");

        beta.IsNewerThan(alpha).Should().BeTrue();
        rc1.IsNewerThan(beta).Should().BeTrue();
        rc2.IsNewerThan(rc1).Should().BeTrue();
    }

    [Fact]
    public void IsNewerThan_ShouldUseCreatedAtAsTiebreaker()
    {
        var earlier = new DateTimeOffset(2025, 12, 14, 0, 0, 0, TimeSpan.Zero);
        var later = earlier.AddMinutes(1);

        var a = new BundleVersion(1, 2, 3, earlier, "edge");
        var b = new BundleVersion(1, 2, 3, later, "edge");

        b.IsNewerThan(a).Should().BeTrue();
        a.IsNewerThan(b).Should().BeFalse();
    }
}

diff --git a/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/VersionMonotonicityCheckerTests.cs b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/VersionMonotonicityCheckerTests.cs
new file mode 100644
index 000000000..eae6791ef
--- /dev/null
+++ b/tests/AirGap/StellaOps.AirGap.Importer.Tests/Versioning/VersionMonotonicityCheckerTests.cs
@@ -0,0 +1,157 @@
using FluentAssertions;
using StellaOps.AirGap.Importer.Versioning;

namespace StellaOps.AirGap.Importer.Tests.Versioning;

/// <summary>
/// Tests for <see cref="VersionMonotonicityChecker"/> using an in-memory store and a fixed clock.
/// </summary>
public sealed class VersionMonotonicityCheckerTests
{
    [Fact]
    public async Task CheckAsync_WhenNoCurrent_ShouldBeFirstActivation()
    {
        var store = new InMemoryBundleVersionStore();
        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-14T00:00:00Z")));

        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-14T00:00:00Z"));
        var result = await checker.CheckAsync("tenant-a", "offline-kit", incoming);

        result.IsMonotonic.Should().BeTrue();
        result.ReasonCode.Should().Be("FIRST_ACTIVATION");
        result.CurrentVersion.Should().BeNull();
        result.CurrentBundleDigest.Should().BeNull();
    }

    [Fact]
    public async Task CheckAsync_WhenOlder_ShouldBeNonMonotonic()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-14T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-14T00:00:00Z"));

        var result = await checker.CheckAsync("tenant-a", "offline-kit", incoming);

        result.IsMonotonic.Should().BeFalse();
        result.ReasonCode.Should().Be("VERSION_NON_MONOTONIC");
        result.CurrentVersion.Should().NotBeNull();
        result.CurrentVersion!.SemVer.Should().Be("2.0.0");
    }

    [Fact]
    public async Task RecordActivationAsync_WhenNonMonotonicWithoutForce_ShouldThrow()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-15T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-15T00:00:00Z"));

        var act = () => checker.RecordActivationAsync("tenant-a", "offline-kit", incoming, "sha256:new");
        // NOTE(review): the thrown type was stripped in transcription; InvalidOperationException
        // is the conventional choice — confirm against the checker implementation.
        await act.Should().ThrowAsync<InvalidOperationException>();
    }

    [Fact]
    public async Task RecordActivationAsync_WhenForced_ShouldWriteForceFields()
    {
        var store = new InMemoryBundleVersionStore();
        await store.UpsertAsync(new BundleVersionRecord(
            TenantId: "tenant-a",
            BundleType: "offline-kit",
            VersionString: "2.0.0",
            Major: 2,
            Minor: 0,
            Patch: 0,
            Prerelease: null,
            BundleCreatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            BundleDigest: "sha256:current",
            ActivatedAt: DateTimeOffset.Parse("2025-12-14T00:00:00Z"),
            WasForceActivated: false,
            ForceActivateReason: null));

        var checker = new VersionMonotonicityChecker(store, new FixedTimeProvider(DateTimeOffset.Parse("2025-12-15T00:00:00Z")));
        var incoming = BundleVersion.Parse("1.0.0", DateTimeOffset.Parse("2025-12-15T00:00:00Z"));

        await checker.RecordActivationAsync(
            "tenant-a",
            "offline-kit",
            incoming,
            "sha256:new",
            wasForceActivated: true,
            forceActivateReason: "manual rollback permitted");

        var current = await store.GetCurrentAsync("tenant-a", "offline-kit");
        current.Should().NotBeNull();
        current!.WasForceActivated.Should().BeTrue();
        current.ForceActivateReason.Should().Be("manual rollback permitted");
        current.BundleDigest.Should().Be("sha256:new");
    }

    /// <summary>Single-slot in-memory store; history is newest-first.</summary>
    private sealed class InMemoryBundleVersionStore : IBundleVersionStore
    {
        private BundleVersionRecord? _current;
        private readonly List<BundleVersionRecord> _history = new();

        public Task<BundleVersionRecord?> GetCurrentAsync(string tenantId, string bundleType, CancellationToken ct = default)
        {
            return Task.FromResult(_current is not null &&
                _current.TenantId.Equals(tenantId, StringComparison.Ordinal) &&
                _current.BundleType.Equals(bundleType, StringComparison.Ordinal)
                ? _current
                : null);
        }

        public Task UpsertAsync(BundleVersionRecord record, CancellationToken ct = default)
        {
            _current = record;
            _history.Insert(0, record);
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<BundleVersionRecord>> GetHistoryAsync(string tenantId, string bundleType, int limit = 10, CancellationToken ct = default)
        {
            var items = _history
                .Where(r => r.TenantId.Equals(tenantId, StringComparison.Ordinal) && r.BundleType.Equals(bundleType, StringComparison.Ordinal))
                .Take(limit)
                .ToArray();

            return Task.FromResult<IReadOnlyList<BundleVersionRecord>>(items);
        }
    }

    /// <summary>Deterministic clock for tests — never use the wall clock in assertions.</summary>
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _utcNow;

        public FixedTimeProvider(DateTimeOffset utcNow)
        {
            _utcNow = utcNow;
        }

        public override DateTimeOffset GetUtcNow() => _utcNow;
    }
}