diff --git a/docs/19_TEST_SUITE_OVERVIEW.md b/docs/19_TEST_SUITE_OVERVIEW.md
index edd03616e..24874c605 100755
--- a/docs/19_TEST_SUITE_OVERVIEW.md
+++ b/docs/19_TEST_SUITE_OVERVIEW.md
@@ -1,7 +1,7 @@
-# Automated Test‑Suite Overview
+# Automated Test-Suite Overview
-This document enumerates **every automated check** executed by the Stella Ops
-CI pipeline, from unit level to chaos experiments. It is intended for
+This document enumerates **every automated check** executed by the Stella Ops
+CI pipeline, from unit level to chaos experiments. It is intended for
contributors who need to extend coverage or diagnose failures.
> **Build parameters** – values such as `{{ dotnet }}` (runtime) and
@@ -9,40 +9,81 @@ contributors who need to extend coverage or diagnose failures.
---
-## Layer map
+## Test Philosophy
-| Layer | Tooling | Entry‑point | Frequency |
-|-------|---------|-------------|-----------|
-| **1. Unit** | `xUnit` (dotnet test) | `*.Tests.csproj` | per PR / push |
-| **2. Property‑based** | `FsCheck` | `SbomPropertyTests` | per PR |
-| **3. Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly |
-| **4. Integration (DB-merge)** | Testcontainers PostgreSQL + Redis | `Concelier.Integration` (vulnerability ingest/merge/export service) | per PR |
-| **5. Contract (gRPC)** | `Buf breaking` | `buf.yaml` files | per PR |
-| **6. Front‑end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR |
-| **7. Front‑end E2E** | `Playwright` | `ui/e2e/**` | nightly |
-| **8. Lighthouse perf / a11y** | `lighthouse-ci` (Chrome headless) | `ui/dist/index.html` | nightly |
-| **9. Load** | `k6` scripted scenarios | `k6/*.js` | nightly |
-| **10. Chaos CPU / OOM** | `pumba` | Docker Compose overlay | weekly |
-| **11. Dependency scanning** | `Trivy fs` + `dotnet list package --vuln` | root | per PR |
-| **12. License compliance** | `LicenceFinder` | root | per PR |
-| **13. SBOM reproducibility** | `in‑toto attestation` diff | GitLab job | release tags |
+### Core Principles
+
+1. **Determinism as Contract**: Scan verdicts must be reproducible. Same inputs → byte-identical outputs.
+2. **Offline by Default**: Every test (except explicitly tagged "online") runs without network access.
+3. **Evidence-First Validation**: Assertions verify the complete evidence chain, not just pass/fail.
+4. **Interop is Required**: Compatibility with ecosystem tools (Syft, Grype, Trivy, cosign) blocks releases.
+5. **Coverage by Risk**: Prioritize testing high-risk paths over line coverage metrics.
+
+### Test Boundaries
+
+- **Lattice/policy merge** algorithms run in `scanner.webservice`
+- **Concelier/Excititor** preserve per-source data (no conflict resolution)
+- Tests enforce these boundaries explicitly
---
-## Quality gates
+## Layer Map
+
+| Layer | Tooling | Entry-point | Frequency |
+|-------|---------|-------------|-----------|
+| **1. Unit** | `xUnit` (dotnet test) | `*.Tests.csproj` | per PR / push |
+| **2. Property-based** | `FsCheck` | `SbomPropertyTests`, `Canonicalization` | per PR |
+| **3. Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly |
+| **4. Integration (DB-merge)** | Testcontainers PostgreSQL + Valkey | `Concelier.Integration` | per PR |
+| **5. Contract (OpenAPI)** | Schema validation | `docs/api/*.yaml` | per PR |
+| **6. Front-end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR |
+| **7. Front-end E2E** | `Playwright` | `ui/e2e/**` | nightly |
+| **8. Lighthouse perf / a11y** | `lighthouse-ci` (Chrome headless) | `ui/dist/index.html` | nightly |
+| **9. Load** | `k6` scripted scenarios | `tests/load/*.js` | nightly |
+| **10. Chaos** | `pumba`, custom harness | `tests/chaos/` | weekly |
+| **11. Interop** | Syft/Grype/cosign | `tests/interop/` | nightly |
+| **12. Offline E2E** | Network-isolated containers | `tests/offline/` | nightly |
+| **13. Replay Verification** | Golden corpus replay | `bench/golden-corpus/` | per PR |
+| **14. Dependency scanning** | `Trivy fs` + `dotnet list package --vuln` | root | per PR |
+| **15. License compliance** | `LicenceFinder` | root | per PR |
+| **16. SBOM reproducibility** | `in-toto attestation` diff | GitLab job | release tags |
+
+---
+
+## Test Categories (xUnit Traits)
+
+```csharp
+[Trait("Category", "Unit")] // Fast, isolated unit tests
+[Trait("Category", "Integration")] // Tests requiring infrastructure
+[Trait("Category", "E2E")] // Full end-to-end workflows
+[Trait("Category", "AirGap")] // Must work without network
+[Trait("Category", "Interop")] // Third-party tool compatibility
+[Trait("Category", "Performance")] // Performance benchmarks
+[Trait("Category", "Chaos")] // Failure injection tests
+[Trait("Category", "Security")] // Security-focused tests
+```
+
+---
+
+## Quality Gates
| Metric | Budget | Gate |
|--------|--------|------|
-| API unit coverage | ≥ 85 % lines | PR merge |
-| API response P95 | ≤ 120 ms | nightly alert |
-| Δ‑SBOM warm scan P95 (4 vCPU) | ≤ 5 s | nightly alert |
-| Lighthouse performance score | ≥ 90 | nightly alert |
-| Lighthouse accessibility score | ≥ 95 | nightly alert |
-| k6 sustained RPS drop | < 5 % vs baseline | nightly alert |
+| API unit coverage | ≥ 85% lines | PR merge |
+| API response P95 | ≤ 120 ms | nightly alert |
+| Δ-SBOM warm scan P95 (4 vCPU) | ≤ 5 s | nightly alert |
+| Lighthouse performance score | ≥ 90 | nightly alert |
+| Lighthouse accessibility score | ≥ 95 | nightly alert |
+| k6 sustained RPS drop | < 5% vs baseline | nightly alert |
+| **Replay determinism** | 0 byte diff | **Release** |
+| **Interop findings parity** | ≥ 95% | **Release** |
+| **Offline E2E** | All pass with no network | **Release** |
+| **Unknowns budget (prod)** | ≤ configured limit | **Release** |
+| **Router Retry-After compliance** | 100% | nightly alert |
---
-## Local runner
+## Local Runner
```bash
# minimal run: unit + property + frontend tests
@@ -50,21 +91,26 @@ contributors who need to extend coverage or diagnose failures.
# full stack incl. Playwright and lighthouse
./scripts/dev-test.sh --full
-````
-The script spins up PostgreSQL/Redis via Testcontainers and requires:
+# category-specific
+dotnet test --filter "Category=Unit"
+dotnet test --filter "Category=AirGap"
+dotnet test --filter "Category=Interop"
+```
+
+The script spins up PostgreSQL/Valkey via Testcontainers and requires:
* Docker ≥ 25
* Node 20 (for Jest/Playwright)
-#### PostgreSQL Testcontainers
+### PostgreSQL Testcontainers
Multiple suites (Concelier connectors, Excititor worker/WebService, Scheduler)
use Testcontainers with PostgreSQL for integration tests. If you don't have
Docker available, tests can also run against a local PostgreSQL instance
listening on `127.0.0.1:5432`.
-#### Local PostgreSQL helper
+### Local PostgreSQL Helper
Some suites (Concelier WebService/Core, Exporter JSON) need a full
PostgreSQL instance when you want to debug or inspect data with `psql`.
@@ -84,9 +130,59 @@ By default the script uses Docker to run PostgreSQL 16, binds to
connection string is printed on start and you can export it before
running `dotnet test` if a suite supports overriding its connection string.
----
+---
-### Concelier OSV↔GHSA parity fixtures
+## New Test Infrastructure (Epic 5100)
+
+### Run Manifest & Replay
+
+Every scan captures a **Run Manifest** containing all inputs (artifact digests, feed versions, policy versions, PRNG seed). This enables deterministic replay:
+
+```bash
+# Replay a scan from manifest
+stella replay --manifest run-manifest.json --output verdict.json
+
+# Verify determinism
+stella replay verify --manifest run-manifest.json
+```
+
+### Evidence Index
+
+The **Evidence Index** links verdicts to their supporting evidence chain:
+- Verdict → SBOM digests → Attestation IDs → Tool versions
+
+### Golden Corpus
+
+Located at `bench/golden-corpus/`, contains 50+ test cases:
+- Severity levels (Critical, High, Medium, Low)
+- VEX scenarios (Not Affected, Affected, Conflicting)
+- Reachability cases (Reachable, Not Reachable, Inconclusive)
+- Unknowns scenarios
+- Scale tests (200 to 50k+ packages)
+- Multi-distro (Alpine, Debian, RHEL, SUSE, Ubuntu)
+- Interop fixtures (Syft-generated, Trivy-generated)
+- Negative cases (malformed inputs)
+
+### Offline Testing
+
+Inherit from `NetworkIsolatedTestBase` for air-gap compliance:
+
+```csharp
+[Trait("Category", "AirGap")]
+public class OfflineTests : NetworkIsolatedTestBase
+{
+ [Fact]
+ public async Task Test_WorksOffline()
+ {
+ // Test implementation
+ AssertNoNetworkCalls(); // Fails if network accessed
+ }
+}
+```
+
+---
+
+## Concelier OSV↔GHSA Parity Fixtures
The Concelier connector suite includes a regression test (`OsvGhsaParityRegressionTests`)
that checks a curated set of GHSA identifiers against OSV responses. The fixture
@@ -104,7 +200,7 @@ fixtures stay stable across machines.
---
-## CI job layout
+## CI Job Layout
```mermaid
flowchart LR
@@ -115,21 +211,42 @@ flowchart LR
I1 --> FE[Jest]
FE --> E2E[Playwright]
E2E --> Lighthouse
+
+ subgraph release-gates
+ REPLAY[Replay Verify]
+ INTEROP[Interop E2E]
+ OFFLINE[Offline E2E]
+ BUDGET[Unknowns Gate]
+ end
+
Lighthouse --> INTEG2[Concelier]
INTEG2 --> LOAD[k6]
- LOAD --> CHAOS[pumba]
+ LOAD --> CHAOS[Chaos Suite]
CHAOS --> RELEASE[Attestation diff]
+
+ RELEASE --> release-gates
```
---
-## Adding a new test layer
+## Adding a New Test Layer
1. Extend `scripts/dev-test.sh` so local contributors get the layer by default.
-2. Add a dedicated GitLab job in `.gitlab-ci.yml` (stage `test` or `nightly`).
+2. Add a dedicated workflow in `.gitea/workflows/` (or GitLab job in `.gitlab-ci.yml`).
3. Register the job in `docs/19_TEST_SUITE_OVERVIEW.md` *and* list its metric
in `docs/metrics/README.md`.
+4. If the test requires network isolation, inherit from `NetworkIsolatedTestBase`.
+5. If the test uses golden corpus, add cases to `bench/golden-corpus/`.
---
-*Last updated {{ "now" | date: "%Y‑%m‑%d" }}*
+## Related Documentation
+
+- [Sprint Epic 5100 - Testing Strategy](implplan/SPRINT_5100_SUMMARY.md)
+- [tests/AGENTS.md](../tests/AGENTS.md)
+- [Offline Operation Guide](24_OFFLINE_KIT.md)
+- [Module Architecture Dossiers](modules/)
+
+---
+
+*Last updated 2025-12-21*
diff --git a/docs/db/schemas/binaries_schema_specification.md b/docs/db/schemas/binaries_schema_specification.md
new file mode 100644
index 000000000..0655f9f37
--- /dev/null
+++ b/docs/db/schemas/binaries_schema_specification.md
@@ -0,0 +1,680 @@
+# Binaries Schema Specification
+
+**Version:** 1.0.0
+**Status:** DRAFT
+**Owner:** BinaryIndex Module
+**Last Updated:** 2025-12-21
+
+---
+
+## 1. Overview
+
+The `binaries` schema stores binary identity, vulnerability mappings, fingerprints, and patch-aware fix status for the BinaryIndex module. This enables detection of vulnerable binaries independent of package metadata.
+
+## 2. Schema Definition
+
+```sql
+-- ============================================================================
+-- BINARIES SCHEMA
+-- ============================================================================
+-- Purpose: Binary identity, fingerprint, and vulnerability mapping for
+-- the BinaryIndex module (vulnerable binaries database).
+-- ============================================================================
+
+CREATE SCHEMA IF NOT EXISTS binaries;
+CREATE SCHEMA IF NOT EXISTS binaries_app;
+
+-- ----------------------------------------------------------------------------
+-- RLS Helper Function
+-- ----------------------------------------------------------------------------
+
+CREATE OR REPLACE FUNCTION binaries_app.require_current_tenant()
+RETURNS TEXT
+LANGUAGE plpgsql STABLE SECURITY DEFINER
+AS $$
+DECLARE
+ v_tenant TEXT;
+BEGIN
+ v_tenant := current_setting('app.tenant_id', true);
+ IF v_tenant IS NULL OR v_tenant = '' THEN
+ RAISE EXCEPTION 'app.tenant_id session variable not set';
+ END IF;
+ RETURN v_tenant;
+END;
+$$;
+
+-- ============================================================================
+-- CORE IDENTITY TABLES
+-- ============================================================================
+
+-- ----------------------------------------------------------------------------
+-- Table: binary_identity
+-- Purpose: Known binary identities extracted from packages
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.binary_identity (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Primary identity (Build-ID preferred for ELF)
+ binary_key TEXT NOT NULL, -- build_id || file_sha256 (normalized)
+ build_id TEXT, -- ELF GNU Build-ID (hex)
+ build_id_type TEXT CHECK (build_id_type IN ('gnu-build-id', 'pe-cv', 'macho-uuid')),
+
+ -- Hashes
+ file_sha256 TEXT NOT NULL, -- sha256 of entire file
+ text_sha256 TEXT, -- sha256 of .text section (ELF)
+ blake3_hash TEXT, -- Optional faster hash
+
+ -- Binary metadata
+ format TEXT NOT NULL CHECK (format IN ('elf', 'pe', 'macho')),
+ architecture TEXT NOT NULL, -- x86-64, aarch64, arm, etc.
+ osabi TEXT, -- linux, windows, darwin
+ binary_type TEXT CHECK (binary_type IN ('executable', 'shared_library', 'static_library', 'object')),
+ is_stripped BOOLEAN DEFAULT FALSE,
+
+ -- Tracking
+ first_seen_snapshot_id UUID,
+ last_seen_snapshot_id UUID,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT binary_identity_key_unique UNIQUE (tenant_id, binary_key)
+);
+
+-- ----------------------------------------------------------------------------
+-- Table: binary_package_map
+-- Purpose: Maps binaries to source packages (per snapshot)
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.binary_package_map (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Binary reference
+ binary_identity_id UUID NOT NULL REFERENCES binaries.binary_identity(id) ON DELETE CASCADE,
+ binary_key TEXT NOT NULL,
+
+ -- Package info
+ distro TEXT NOT NULL, -- debian, ubuntu, rhel, alpine
+ release TEXT NOT NULL, -- bookworm, jammy, 9, 3.19
+ source_pkg TEXT NOT NULL, -- Source package name (e.g., openssl)
+ binary_pkg TEXT NOT NULL, -- Binary package name (e.g., libssl3)
+ pkg_version TEXT NOT NULL, -- Full distro version (e.g., 1.1.1n-0+deb11u5)
+ pkg_purl TEXT, -- PURL if derivable
+ architecture TEXT NOT NULL,
+
+ -- File location
+ file_path_in_pkg TEXT NOT NULL, -- /usr/lib/x86_64-linux-gnu/libssl.so.3
+
+ -- Snapshot reference
+ snapshot_id UUID NOT NULL,
+
+ -- Metadata
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT binary_package_map_unique UNIQUE (binary_identity_id, snapshot_id, file_path_in_pkg)
+);
+
+-- ----------------------------------------------------------------------------
+-- Table: corpus_snapshots
+-- Purpose: Tracks corpus ingestion snapshots
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.corpus_snapshots (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Snapshot identification
+ distro TEXT NOT NULL,
+ release TEXT NOT NULL,
+ architecture TEXT NOT NULL,
+ snapshot_id TEXT NOT NULL, -- Unique snapshot identifier
+
+ -- Content tracking
+ packages_processed INT NOT NULL DEFAULT 0,
+ binaries_indexed INT NOT NULL DEFAULT 0,
+ repo_metadata_digest TEXT, -- SHA-256 of repo metadata
+
+ -- Signing
+ signing_key_id TEXT,
+ dsse_envelope_ref TEXT, -- RustFS reference to DSSE envelope
+
+ -- Status
+ status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'completed', 'failed')),
+ error TEXT,
+
+ -- Timestamps
+ started_at TIMESTAMPTZ,
+ completed_at TIMESTAMPTZ,
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT corpus_snapshots_unique UNIQUE (tenant_id, distro, release, architecture, snapshot_id)
+);
+
+-- ============================================================================
+-- VULNERABILITY MAPPING TABLES
+-- ============================================================================
+
+-- ----------------------------------------------------------------------------
+-- Table: vulnerable_buildids
+-- Purpose: Build-IDs known to be associated with vulnerable packages
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.vulnerable_buildids (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Build-ID reference
+ buildid_type TEXT NOT NULL CHECK (buildid_type IN ('gnu-build-id', 'pe-cv', 'macho-uuid')),
+ buildid_value TEXT NOT NULL, -- Hex string
+
+ -- Package info
+ purl TEXT NOT NULL, -- Package URL
+ pkg_version TEXT NOT NULL,
+ distro TEXT,
+ release TEXT,
+
+ -- Confidence
+ confidence TEXT NOT NULL DEFAULT 'exact' CHECK (confidence IN ('exact', 'inferred', 'heuristic')),
+
+ -- Provenance
+ provenance JSONB DEFAULT '{}',
+ snapshot_id UUID REFERENCES binaries.corpus_snapshots(id),
+
+ -- Tracking
+ indexed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT vulnerable_buildids_unique UNIQUE (tenant_id, buildid_value, buildid_type, purl, pkg_version)
+);
+
+-- ----------------------------------------------------------------------------
+-- Table: binary_vuln_assertion
+-- Purpose: CVE status assertions for specific binaries
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.binary_vuln_assertion (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Binary reference
+ binary_key TEXT NOT NULL,
+ binary_identity_id UUID REFERENCES binaries.binary_identity(id),
+
+ -- CVE reference
+ cve_id TEXT NOT NULL,
+ advisory_id UUID, -- Reference to vuln.advisories
+
+ -- Status
+ status TEXT NOT NULL CHECK (status IN ('affected', 'not_affected', 'fixed', 'unknown')),
+
+ -- Method used to determine status
+ method TEXT NOT NULL CHECK (method IN ('range_match', 'buildid_catalog', 'fingerprint_match', 'fix_index')),
+ confidence NUMERIC(3,2) CHECK (confidence >= 0 AND confidence <= 1),
+
+ -- Evidence
+ evidence_ref TEXT, -- RustFS reference to evidence bundle
+ evidence_digest TEXT, -- SHA-256 of evidence
+
+ -- Tracking
+ evaluated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT binary_vuln_assertion_unique UNIQUE (tenant_id, binary_key, cve_id)
+);
+
+-- ============================================================================
+-- FIX INDEX TABLES (Patch-Aware Backport Handling)
+-- ============================================================================
+
+-- ----------------------------------------------------------------------------
+-- Table: cve_fix_evidence
+-- Purpose: Raw evidence of CVE fixes (append-only)
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.cve_fix_evidence (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Key fields
+ distro TEXT NOT NULL,
+ release TEXT NOT NULL,
+ source_pkg TEXT NOT NULL,
+ cve_id TEXT NOT NULL,
+
+ -- Fix information
+ state TEXT NOT NULL CHECK (state IN ('fixed', 'vulnerable', 'not_affected', 'wontfix', 'unknown')),
+ fixed_version TEXT, -- Distro version string (nullable for not_affected)
+
+ -- Method and confidence
+ method TEXT NOT NULL CHECK (method IN ('security_feed', 'changelog', 'patch_header', 'upstream_patch_match')),
+ confidence NUMERIC(3,2) NOT NULL CHECK (confidence >= 0 AND confidence <= 1),
+
+ -- Evidence details
+ evidence JSONB NOT NULL, -- Method-specific evidence payload
+
+ -- Snapshot reference
+ snapshot_id UUID REFERENCES binaries.corpus_snapshots(id),
+
+ -- Tracking
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- ----------------------------------------------------------------------------
+-- Table: cve_fix_index
+-- Purpose: Merged best-record for CVE fix status per distro/package
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.cve_fix_index (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Key fields
+ distro TEXT NOT NULL,
+ release TEXT NOT NULL,
+ source_pkg TEXT NOT NULL,
+ cve_id TEXT NOT NULL,
+ architecture TEXT, -- NULL means all architectures
+
+ -- Fix status
+ state TEXT NOT NULL CHECK (state IN ('fixed', 'vulnerable', 'not_affected', 'wontfix', 'unknown')),
+ fixed_version TEXT,
+
+ -- Merge metadata
+ primary_method TEXT NOT NULL, -- Method of highest-confidence evidence
+ confidence NUMERIC(3,2) NOT NULL,
+ evidence_ids UUID[], -- References to cve_fix_evidence
+
+ -- Tracking
+ computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT cve_fix_index_unique UNIQUE (tenant_id, distro, release, source_pkg, cve_id, architecture)
+);
+
+-- ============================================================================
+-- FINGERPRINT TABLES
+-- ============================================================================
+
+-- ----------------------------------------------------------------------------
+-- Table: vulnerable_fingerprints
+-- Purpose: Function fingerprints for CVE detection
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.vulnerable_fingerprints (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- CVE and component
+ cve_id TEXT NOT NULL,
+ component TEXT NOT NULL, -- e.g., openssl, glibc
+ purl TEXT, -- Package URL if applicable
+
+ -- Fingerprint data
+ algorithm TEXT NOT NULL CHECK (algorithm IN ('basic_block', 'control_flow_graph', 'string_refs', 'combined')),
+ fingerprint_id TEXT NOT NULL, -- Unique ID (e.g., "bb-abc123...")
+ fingerprint_hash BYTEA NOT NULL, -- Raw fingerprint bytes (16-32 bytes)
+ architecture TEXT NOT NULL, -- x86-64, aarch64
+
+ -- Function hints
+ function_name TEXT, -- Original function name if known
+ source_file TEXT, -- Source file path
+ source_line INT,
+
+ -- Confidence and validation
+ similarity_threshold NUMERIC(3,2) DEFAULT 0.95,
+ confidence NUMERIC(3,2) CHECK (confidence >= 0 AND confidence <= 1),
+ validated BOOLEAN DEFAULT FALSE,
+ validation_stats JSONB DEFAULT '{}', -- precision, recall, etc.
+
+ -- Reference builds
+ vuln_build_ref TEXT, -- RustFS ref to vulnerable reference build
+ fixed_build_ref TEXT, -- RustFS ref to fixed reference build
+
+ -- Metadata
+ notes TEXT,
+ evidence_ref TEXT, -- RustFS ref to evidence bundle
+
+ -- Tracking
+ indexed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT vulnerable_fingerprints_unique UNIQUE (tenant_id, cve_id, algorithm, fingerprint_id, architecture)
+);
+
+-- ----------------------------------------------------------------------------
+-- Table: fingerprint_corpus_metadata
+-- Purpose: Tracks which packages have been fingerprinted
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.fingerprint_corpus_metadata (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Package identification
+ purl TEXT NOT NULL,
+ version TEXT NOT NULL,
+
+ -- Fingerprinting info
+ algorithm TEXT NOT NULL,
+ binary_digest TEXT, -- sha256 of the binary analyzed
+
+ -- Statistics
+ function_count INT NOT NULL DEFAULT 0,
+ fingerprints_indexed INT NOT NULL DEFAULT 0,
+
+ -- Provenance
+ indexed_by TEXT, -- Service/user that indexed
+ indexed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ -- Tracking
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+
+ CONSTRAINT fingerprint_corpus_metadata_unique UNIQUE (tenant_id, purl, version, algorithm)
+);
+
+-- ============================================================================
+-- MATCH RESULTS TABLES
+-- ============================================================================
+
+-- ----------------------------------------------------------------------------
+-- Table: fingerprint_matches
+-- Purpose: Records fingerprint matches during scans
+-- ----------------------------------------------------------------------------
+
+CREATE TABLE binaries.fingerprint_matches (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ tenant_id UUID NOT NULL,
+
+ -- Scan reference
+ scan_id UUID NOT NULL, -- Reference to scanner.scan_manifest
+
+ -- Match details
+ match_type TEXT NOT NULL CHECK (match_type IN ('fingerprint', 'buildid', 'hash_exact')),
+ binary_key TEXT NOT NULL,
+ binary_identity_id UUID REFERENCES binaries.binary_identity(id),
+
+ -- Vulnerable package
+ vulnerable_purl TEXT NOT NULL,
+ vulnerable_version TEXT NOT NULL,
+
+ -- Fingerprint match specifics (nullable for non-fingerprint matches)
+ matched_fingerprint_id UUID REFERENCES binaries.vulnerable_fingerprints(id),
+ matched_function TEXT,
+ similarity NUMERIC(3,2), -- 0.00-1.00
+
+ -- CVE linkage
+ advisory_ids TEXT[], -- Linked CVE/GHSA IDs
+
+ -- Reachability (populated later by Scanner)
+ reachability_status TEXT CHECK (reachability_status IN ('reachable', 'unreachable', 'unknown', 'partial')),
+
+ -- Evidence
+ evidence JSONB DEFAULT '{}',
+
+ -- Tracking
+ matched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- ============================================================================
+-- INDEXES
+-- ============================================================================
+
+-- binary_identity indexes
+CREATE INDEX idx_binary_identity_tenant ON binaries.binary_identity(tenant_id);
+CREATE INDEX idx_binary_identity_buildid ON binaries.binary_identity(build_id) WHERE build_id IS NOT NULL;
+CREATE INDEX idx_binary_identity_sha256 ON binaries.binary_identity(file_sha256);
+CREATE INDEX idx_binary_identity_key ON binaries.binary_identity(binary_key);
+
+-- binary_package_map indexes
+CREATE INDEX idx_binary_package_map_tenant ON binaries.binary_package_map(tenant_id);
+CREATE INDEX idx_binary_package_map_binary ON binaries.binary_package_map(binary_identity_id);
+CREATE INDEX idx_binary_package_map_distro ON binaries.binary_package_map(distro, release, source_pkg);
+CREATE INDEX idx_binary_package_map_snapshot ON binaries.binary_package_map(snapshot_id);
+CREATE INDEX idx_binary_package_map_purl ON binaries.binary_package_map(pkg_purl) WHERE pkg_purl IS NOT NULL;
+
+-- corpus_snapshots indexes
+CREATE INDEX idx_corpus_snapshots_tenant ON binaries.corpus_snapshots(tenant_id);
+CREATE INDEX idx_corpus_snapshots_distro ON binaries.corpus_snapshots(distro, release, architecture);
+CREATE INDEX idx_corpus_snapshots_status ON binaries.corpus_snapshots(status) WHERE status IN ('pending', 'processing');
+
+-- vulnerable_buildids indexes
+CREATE INDEX idx_vulnerable_buildids_tenant ON binaries.vulnerable_buildids(tenant_id);
+CREATE INDEX idx_vulnerable_buildids_value ON binaries.vulnerable_buildids(buildid_type, buildid_value);
+CREATE INDEX idx_vulnerable_buildids_purl ON binaries.vulnerable_buildids(purl);
+
+-- binary_vuln_assertion indexes
+CREATE INDEX idx_binary_vuln_assertion_tenant ON binaries.binary_vuln_assertion(tenant_id);
+CREATE INDEX idx_binary_vuln_assertion_binary ON binaries.binary_vuln_assertion(binary_key);
+CREATE INDEX idx_binary_vuln_assertion_cve ON binaries.binary_vuln_assertion(cve_id);
+CREATE INDEX idx_binary_vuln_assertion_status ON binaries.binary_vuln_assertion(status) WHERE status = 'affected';
+
+-- cve_fix_evidence indexes
+CREATE INDEX idx_cve_fix_evidence_tenant ON binaries.cve_fix_evidence(tenant_id);
+CREATE INDEX idx_cve_fix_evidence_key ON binaries.cve_fix_evidence(distro, release, source_pkg, cve_id);
+
+-- cve_fix_index indexes
+CREATE INDEX idx_cve_fix_index_tenant ON binaries.cve_fix_index(tenant_id);
+CREATE INDEX idx_cve_fix_index_lookup ON binaries.cve_fix_index(distro, release, source_pkg, cve_id);
+CREATE INDEX idx_cve_fix_index_state ON binaries.cve_fix_index(state) WHERE state = 'fixed';
+
+-- vulnerable_fingerprints indexes
+CREATE INDEX idx_vulnerable_fingerprints_tenant ON binaries.vulnerable_fingerprints(tenant_id);
+CREATE INDEX idx_vulnerable_fingerprints_cve ON binaries.vulnerable_fingerprints(cve_id);
+CREATE INDEX idx_vulnerable_fingerprints_component ON binaries.vulnerable_fingerprints(component, architecture);
+CREATE INDEX idx_vulnerable_fingerprints_hash ON binaries.vulnerable_fingerprints USING hash (fingerprint_hash);
+CREATE INDEX idx_vulnerable_fingerprints_validated ON binaries.vulnerable_fingerprints(validated) WHERE validated = TRUE;
+
+-- fingerprint_corpus_metadata indexes
+CREATE INDEX idx_fingerprint_corpus_tenant ON binaries.fingerprint_corpus_metadata(tenant_id);
+CREATE INDEX idx_fingerprint_corpus_purl ON binaries.fingerprint_corpus_metadata(purl, version);
+
+-- fingerprint_matches indexes
+CREATE INDEX idx_fingerprint_matches_tenant ON binaries.fingerprint_matches(tenant_id);
+CREATE INDEX idx_fingerprint_matches_scan ON binaries.fingerprint_matches(scan_id);
+CREATE INDEX idx_fingerprint_matches_type ON binaries.fingerprint_matches(match_type);
+CREATE INDEX idx_fingerprint_matches_purl ON binaries.fingerprint_matches(vulnerable_purl);
+
+-- ============================================================================
+-- ROW-LEVEL SECURITY
+-- ============================================================================
+
+-- Enable RLS on all tenant-scoped tables
+ALTER TABLE binaries.binary_identity ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.binary_identity FORCE ROW LEVEL SECURITY;
+CREATE POLICY binary_identity_tenant_isolation ON binaries.binary_identity
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.binary_package_map ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.binary_package_map FORCE ROW LEVEL SECURITY;
+CREATE POLICY binary_package_map_tenant_isolation ON binaries.binary_package_map
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.corpus_snapshots ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.corpus_snapshots FORCE ROW LEVEL SECURITY;
+CREATE POLICY corpus_snapshots_tenant_isolation ON binaries.corpus_snapshots
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.vulnerable_buildids ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.vulnerable_buildids FORCE ROW LEVEL SECURITY;
+CREATE POLICY vulnerable_buildids_tenant_isolation ON binaries.vulnerable_buildids
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.binary_vuln_assertion ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.binary_vuln_assertion FORCE ROW LEVEL SECURITY;
+CREATE POLICY binary_vuln_assertion_tenant_isolation ON binaries.binary_vuln_assertion
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.cve_fix_evidence ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.cve_fix_evidence FORCE ROW LEVEL SECURITY;
+CREATE POLICY cve_fix_evidence_tenant_isolation ON binaries.cve_fix_evidence
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.cve_fix_index ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.cve_fix_index FORCE ROW LEVEL SECURITY;
+CREATE POLICY cve_fix_index_tenant_isolation ON binaries.cve_fix_index
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.vulnerable_fingerprints ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.vulnerable_fingerprints FORCE ROW LEVEL SECURITY;
+CREATE POLICY vulnerable_fingerprints_tenant_isolation ON binaries.vulnerable_fingerprints
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.fingerprint_corpus_metadata ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.fingerprint_corpus_metadata FORCE ROW LEVEL SECURITY;
+CREATE POLICY fingerprint_corpus_metadata_tenant_isolation ON binaries.fingerprint_corpus_metadata
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+
+ALTER TABLE binaries.fingerprint_matches ENABLE ROW LEVEL SECURITY;
+ALTER TABLE binaries.fingerprint_matches FORCE ROW LEVEL SECURITY;
+CREATE POLICY fingerprint_matches_tenant_isolation ON binaries.fingerprint_matches
+ FOR ALL USING (tenant_id::text = binaries_app.require_current_tenant())
+ WITH CHECK (tenant_id::text = binaries_app.require_current_tenant());
+```
+
+---
+
+## 3. Table Relationships
+
+```
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ BINARIES SCHEMA │
+│ │
+│ ┌────────────────────┐ ┌────────────────────┐ │
+│ │ corpus_snapshots │<────────│ binary_package_map │ │
+│ │ (ingestion state) │ │ (binary→pkg) │ │
+│ └─────────┬──────────┘ └────────┬───────────┘ │
+│ │ │ │
+│ │ ▼ │
+│ │ ┌────────────────────┐ │
+│ └───────────────────>│ binary_identity │<─────────────────┐ │
+│ │ (Build-ID, hashes) │ │ │
+│ └────────┬───────────┘ │ │
+│ │ │ │
+│ ┌─────────────────────────────┼───────────────────────────────┤ │
+│ │ │ │ │
+│ ▼ ▼ │ │
+│ ┌────────────────────┐ ┌─────────────────────┐ ┌──────────┴───┐
+│ │ vulnerable_buildids│ │ binary_vuln_ │ │fingerprint_ │
+│ │ (known vuln builds)│ │ assertion │ │matches │
+│ └────────────────────┘ │ (CVE status) │ │(scan results)│
+│ └─────────────────────┘ └──────────────┘
+│ │
+│ ┌─────────────────────────────────────────────────────────────────────────┐│
+│ │ FIX INDEX (Patch-Aware) ││
+│ │ ┌────────────────────┐ ┌────────────────────┐ ││
+│ │ │ cve_fix_evidence │────────>│ cve_fix_index │ ││
+│ │ │ (raw evidence) │ merge │ (merged best) │ ││
+│ │ └────────────────────┘ └────────────────────┘ ││
+│ └─────────────────────────────────────────────────────────────────────────┘│
+│ │
+│ ┌─────────────────────────────────────────────────────────────────────────┐│
+│ │ FINGERPRINTS ││
+│ │ ┌────────────────────┐ ┌──────────────────────┐ ││
+│ │ │vulnerable_ │ │fingerprint_corpus_ │ ││
+│ │ │fingerprints │ │metadata │ ││
+│ │ │(CVE fingerprints) │ │(what's indexed) │ ││
+│ │ └────────────────────┘ └──────────────────────┘ ││
+│ └─────────────────────────────────────────────────────────────────────────┘│
+└─────────────────────────────────────────────────────────────────────────────┘
+```
+
+---
+
+## 4. Query Patterns
+
+### 4.1 Lookup by Build-ID
+
+```sql
+-- Find vulnerabilities for a specific Build-ID
+SELECT ba.cve_id, ba.status, ba.confidence, ba.method
+FROM binaries.binary_vuln_assertion ba
+JOIN binaries.binary_identity bi ON bi.binary_key = ba.binary_key
+WHERE bi.build_id = :build_id
+ AND bi.build_id_type = 'gnu-build-id'
+ AND ba.status = 'affected';
+```
+
+### 4.2 Check Fix Status (Patch-Aware)
+
+```sql
+-- Check if a CVE is fixed for a specific distro/package
+SELECT cfi.state, cfi.fixed_version, cfi.confidence, cfi.primary_method
+FROM binaries.cve_fix_index cfi
+WHERE cfi.distro = :distro
+ AND cfi.release = :release
+ AND cfi.source_pkg = :source_pkg
+ AND cfi.cve_id = :cve_id;
+```
+
+### 4.3 Fingerprint Similarity Search
+
+```sql
+-- Find fingerprints with similar hash (requires application-level similarity)
+SELECT vf.cve_id, vf.component, vf.function_name, vf.confidence
+FROM binaries.vulnerable_fingerprints vf
+WHERE vf.algorithm = :algorithm
+  AND vf.architecture = :architecture
+  AND vf.validated = TRUE;
+-- Application performs similarity comparison on fingerprint_hash
+
+---
+
+## 5. Migration Strategy
+
+### 5.1 Initial Migration
+
+```sql
+-- V001__create_binaries_schema.sql
+-- Creates all tables, indexes, and RLS policies
+```
+
+### 5.2 Seed Data
+
+```sql
+-- S001__seed_reference_fingerprints.sql
+-- Seeds fingerprints for high-impact CVEs from golden corpus
+```
+
+---
+
+## 6. Performance Considerations
+
+### 6.1 Table Sizing Estimates
+
+| Table | Expected Rows | Growth Rate |
+|-------|---------------|-------------|
+| binary_identity | 10M | 1M/month |
+| binary_package_map | 50M | 5M/month |
+| vulnerable_buildids | 1M | 100K/month |
+| cve_fix_index | 500K | 50K/month |
+| vulnerable_fingerprints | 100K | 10K/month |
+| fingerprint_matches | 10M | 1M/month |
+
+### 6.2 Partitioning Candidates
+
+- `fingerprint_matches` - Partition by `matched_at` (monthly)
+- `cve_fix_evidence` - Partition by `created_at` (monthly)
+
+### 6.3 Index Maintenance
+
+- Hash index on `fingerprint_hash` for exact matches
+- Consider bloom filter for fingerprint similarity pre-filtering
+
+---
+
+*Document Version: 1.0.0*
+*Last Updated: 2025-12-21*
diff --git a/docs/implplan/SPRINT_3600_0001_0001_gateway_webservice.md b/docs/implplan/SPRINT_3600_0001_0001_gateway_webservice.md
new file mode 100644
index 000000000..65096e2be
--- /dev/null
+++ b/docs/implplan/SPRINT_3600_0001_0001_gateway_webservice.md
@@ -0,0 +1,378 @@
+# Sprint 3600.0001.0001 · Gateway WebService — HTTP Ingress Implementation
+
+## Topic & Scope
+- Implement the missing `StellaOps.Gateway.WebService` HTTP ingress service.
+- This is the single entry point for all external HTTP traffic, routing to microservices via the Router binary protocol.
+- Connects the existing `StellaOps.Router.Gateway` library to a production-ready ASP.NET Core host.
+- **Working directory:** `src/Gateway/StellaOps.Gateway.WebService/`
+
+## Dependencies & Concurrency
+- **Upstream**: `StellaOps.Router.Gateway`, `StellaOps.Router.Transport.*`, `StellaOps.Auth.ServerIntegration`
+- **Downstream**: All external API consumers, CLI, UI
+- **Safe to parallelize with**: Sprints 3600.0002.*, 4200.*, 5200.*
+
+## Documentation Prerequisites
+- `docs/modules/router/architecture.md` (canonical Router specification)
+- `docs/modules/gateway/openapi.md` (OpenAPI aggregation)
+- `docs/product-advisories/archived/2025-12-21-reference-architecture/20-Dec-2025 - Stella Ops Reference Architecture.md`
+- `docs/07_HIGH_LEVEL_ARCHITECTURE.md` Section 7 (APIs)
+
+---
+
+## Tasks
+
+### T1: Project Scaffolding
+
+**Assignee**: Platform Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Create the Gateway.WebService project with proper structure and dependencies.
+
+**Implementation Path**: `src/Gateway/StellaOps.Gateway.WebService/`
+
+**Acceptance Criteria**:
+- [ ] `StellaOps.Gateway.WebService.csproj` targeting `net10.0`
+- [ ] References: `StellaOps.Router.Gateway`, `StellaOps.Auth.ServerIntegration`, `StellaOps.Router.Transport.Tcp`, `StellaOps.Router.Transport.Tls`
+- [ ] `Program.cs` with minimal viable bootstrap
+- [ ] `appsettings.json` and `appsettings.Development.json`
+- [ ] Dockerfile for containerized deployment
+- [ ] Added to `StellaOps.sln`
+
+**Project Structure**:
+```
+src/Gateway/
+├── StellaOps.Gateway.WebService/
+│ ├── StellaOps.Gateway.WebService.csproj
+│ ├── Program.cs
+│ ├── Dockerfile
+│ ├── appsettings.json
+│ ├── appsettings.Development.json
+│ ├── Configuration/
+│ │ └── GatewayOptions.cs
+│ ├── Middleware/
+│ │ ├── TenantMiddleware.cs
+│ │ ├── RequestRoutingMiddleware.cs
+│ │ └── HealthCheckMiddleware.cs
+│ └── Services/
+│ ├── GatewayHostedService.cs
+│ └── OpenApiAggregationService.cs
+```
+
+---
+
+### T2: Gateway Host Service
+
+**Assignee**: Platform Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Implement the hosted service that manages Router transport connections and microservice registration.
+
+**Acceptance Criteria**:
+- [ ] `GatewayHostedService` : `IHostedService`
+- [ ] Starts TCP/TLS transport servers on configured ports
+- [ ] Handles HELLO frames from microservices
+- [ ] Maintains connection health via heartbeats
+- [ ] Graceful shutdown with DRAINING state propagation
+- [ ] Metrics: active_connections, registered_endpoints
+
+**Code Spec**:
+```csharp
+public sealed class GatewayHostedService : IHostedService, IDisposable
+{
+    private readonly ITransportServer _tcpServer;
+    private readonly ITransportServer _tlsServer;
+    private readonly IRoutingStateManager _routingState;
+    private readonly GatewayOptions _options; // needed below for TcpPort/TlsPort logging
+    private readonly ILogger _logger;
+ public async Task StartAsync(CancellationToken ct)
+ {
+ _tcpServer.OnHelloReceived += HandleHelloAsync;
+ _tcpServer.OnHeartbeatReceived += HandleHeartbeatAsync;
+ _tcpServer.OnConnectionClosed += HandleDisconnectAsync;
+
+ await _tcpServer.StartAsync(ct);
+ await _tlsServer.StartAsync(ct);
+
+ _logger.LogInformation("Gateway started on TCP:{TcpPort} TLS:{TlsPort}",
+ _options.TcpPort, _options.TlsPort);
+ }
+
+ public async Task StopAsync(CancellationToken ct)
+ {
+ await _routingState.DrainAllConnectionsAsync(ct);
+ await _tcpServer.StopAsync(ct);
+ await _tlsServer.StopAsync(ct);
+ }
+}
+```
+
+---
+
+### T3: Request Routing Middleware
+
+**Assignee**: Platform Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Implement the core HTTP-to-binary routing middleware.
+
+**Acceptance Criteria**:
+- [ ] `RequestRoutingMiddleware` intercepts all non-system routes
+- [ ] Extracts `(Method, Path)` from HTTP request
+- [ ] Looks up endpoint in routing state
+- [ ] Serializes HTTP request to binary frame
+- [ ] Sends to selected microservice instance
+- [ ] Deserializes binary response to HTTP response
+- [ ] Supports streaming responses (chunked transfer)
+- [ ] Propagates cancellation on client disconnect
+- [ ] Request correlation ID in X-Correlation-Id header
+
+**Routing Flow**:
+```
+HTTP Request → Middleware → RoutingState.SelectInstance()
+ ↓
+ TransportClient.SendRequestAsync()
+ ↓
+ Microservice processes
+ ↓
+ TransportClient.ReceiveResponseAsync()
+ ↓
+HTTP Response ← Middleware ← Response Frame
+```
+
+---
+
+### T4: Authentication & Authorization Integration
+
+**Assignee**: Platform Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Integrate Authority DPoP/mTLS validation and claims-based authorization.
+
+**Acceptance Criteria**:
+- [ ] DPoP token validation via `StellaOps.Auth.ServerIntegration`
+- [ ] mTLS certificate binding validation
+- [ ] Claims extraction and propagation to microservices
+- [ ] Endpoint-level authorization based on `RequiringClaims`
+- [ ] Tenant context extraction from `tid` claim
+- [ ] Rate limiting per tenant/identity
+- [ ] Audit logging of auth failures
+
+**Claims Propagation**:
+```csharp
+// Claims are serialized into request frame headers
+var claims = new Dictionary<string, string>
+{
+ ["sub"] = principal.FindFirst("sub")?.Value ?? "",
+ ["tid"] = principal.FindFirst("tid")?.Value ?? "",
+ ["scope"] = string.Join(" ", principal.FindAll("scope").Select(c => c.Value)),
+ ["cnf.jkt"] = principal.FindFirst("cnf.jkt")?.Value ?? ""
+};
+requestFrame.Headers = claims;
+```
+
+---
+
+### T5: OpenAPI Aggregation Endpoint
+
+**Assignee**: Platform Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Implement aggregated OpenAPI 3.1.0 spec generation from registered endpoints.
+
+**Acceptance Criteria**:
+- [ ] `GET /openapi.json` returns aggregated spec
+- [ ] `GET /openapi.yaml` returns YAML format
+- [ ] TTL-based caching (5 min default)
+- [ ] ETag generation for conditional requests
+- [ ] Schema validation before aggregation
+- [ ] Includes all registered endpoints with their schemas
+- [ ] Info section populated from gateway config
+
+---
+
+### T6: Health & Readiness Endpoints
+
+**Assignee**: Platform Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Implement health check endpoints for orchestration platforms.
+
+**Acceptance Criteria**:
+- [ ] `GET /health/live` - Liveness probe (process alive)
+- [ ] `GET /health/ready` - Readiness probe (accepting traffic)
+- [ ] `GET /health/startup` - Startup probe (initialization complete)
+- [ ] Downstream health aggregation from connected microservices
+- [ ] Metrics endpoint at `/metrics` (Prometheus format)
+
+---
+
+### T7: Configuration & Options
+
+**Assignee**: Platform Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Define comprehensive gateway configuration model.
+
+**Acceptance Criteria**:
+- [ ] `GatewayOptions` with all configurable settings
+- [ ] YAML configuration support
+- [ ] Environment variable overrides
+- [ ] Configuration validation on startup
+- [ ] Hot-reload for non-transport settings
+
+**Configuration Spec**:
+```yaml
+gateway:
+ node:
+ region: "eu1"
+ nodeId: "gw-eu1-01"
+ environment: "prod"
+
+ transports:
+ tcp:
+ enabled: true
+ port: 9100
+ maxConnections: 1000
+ tls:
+ enabled: true
+ port: 9443
+ certificatePath: "/certs/gateway.pfx"
+ clientCertificateMode: "RequireCertificate"
+
+ routing:
+ defaultTimeout: "30s"
+ maxRequestBodySize: "100MB"
+ streamingEnabled: true
+ neighborRegions: ["eu2", "us1"]
+
+ auth:
+ dpopEnabled: true
+ mtlsEnabled: true
+ rateLimiting:
+ enabled: true
+ requestsPerMinute: 1000
+ burstSize: 100
+
+ openapi:
+ enabled: true
+ cacheTtlSeconds: 300
+```
+
+---
+
+### T8: Unit Tests
+
+**Assignee**: Platform Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Comprehensive unit tests for gateway components.
+
+**Acceptance Criteria**:
+- [ ] Routing middleware tests (happy path, errors, timeouts)
+- [ ] Instance selection algorithm tests
+- [ ] Claims extraction tests
+- [ ] Configuration validation tests
+- [ ] OpenAPI aggregation tests
+- [ ] 90%+ code coverage
+
+---
+
+### T9: Integration Tests
+
+**Assignee**: Platform Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+End-to-end integration tests with in-memory transport.
+
+**Acceptance Criteria**:
+- [ ] Request routing through gateway to mock microservice
+- [ ] Streaming response handling
+- [ ] Cancellation propagation
+- [ ] Auth flow integration
+- [ ] Multi-instance load balancing
+- [ ] Health check aggregation
+- [ ] Uses `StellaOps.Router.Transport.InMemory` for testing
+
+---
+
+### T10: Documentation
+
+**Assignee**: Platform Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Create gateway architecture documentation.
+
+**Acceptance Criteria**:
+- [ ] `docs/modules/gateway/architecture.md` - Full architecture card
+- [ ] Update `docs/07_HIGH_LEVEL_ARCHITECTURE.md` with gateway details
+- [ ] Operator runbook for deployment and troubleshooting
+- [ ] Configuration reference
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Platform Team | Project Scaffolding |
+| 2 | T2 | TODO | T1 | Platform Team | Gateway Host Service |
+| 3 | T3 | TODO | T2 | Platform Team | Request Routing Middleware |
+| 4 | T4 | TODO | T1 | Platform Team | Auth & Authorization Integration |
+| 5 | T5 | TODO | T2 | Platform Team | OpenAPI Aggregation Endpoint |
+| 6 | T6 | TODO | T1 | Platform Team | Health & Readiness Endpoints |
+| 7 | T7 | TODO | T1 | Platform Team | Configuration & Options |
+| 8 | T8 | TODO | T1-T7 | Platform Team | Unit Tests |
+| 9 | T9 | TODO | T8 | Platform Team | Integration Tests |
+| 10 | T10 | TODO | T1-T9 | Platform Team | Documentation |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from Reference Architecture advisory gap analysis. | Agent |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Single ingress point | Decision | Platform Team | All HTTP traffic goes through Gateway.WebService |
+| Binary protocol only for internal | Decision | Platform Team | No HTTP between Gateway and microservices |
+| TLS required for production | Decision | Platform Team | TCP transport only for development/testing |
+| DPoP + mTLS dual support | Decision | Platform Team | Both auth mechanisms supported concurrently |
+
+---
+
+## Success Criteria
+
+- [ ] Gateway accepts HTTP requests and routes to microservices via binary protocol
+- [ ] All existing Router.Gateway tests pass
+- [ ] `tests/StellaOps.Gateway.WebService.Tests/` project references work (no longer orphaned)
+- [ ] OpenAPI spec aggregation functional
+- [ ] Auth integration with Authority validated
+- [ ] Performance: <5ms routing overhead at P99
+
+**Sprint Status**: TODO (0/10 tasks complete)
diff --git a/docs/implplan/SPRINT_3600_0002_0001_cyclonedx_1_7_upgrade.md b/docs/implplan/SPRINT_3600_0002_0001_cyclonedx_1_7_upgrade.md
new file mode 100644
index 000000000..fe76b6c4d
--- /dev/null
+++ b/docs/implplan/SPRINT_3600_0002_0001_cyclonedx_1_7_upgrade.md
@@ -0,0 +1,309 @@
+# Sprint 3600.0002.0001 · CycloneDX 1.7 Upgrade — SBOM Format Migration
+
+## Topic & Scope
+- Upgrade all CycloneDX SBOM generation from version 1.6 to version 1.7.
+- Update serialization, parsing, and validation to CycloneDX 1.7 specification.
+- Maintain backward compatibility for reading CycloneDX 1.6 documents.
+- **Working directory:** `src/Scanner/__Libraries/StellaOps.Scanner.Emit/`, `src/SbomService/`, `src/Excititor/`
+
+## Dependencies & Concurrency
+- **Upstream**: CycloneDX Core NuGet package update
+- **Downstream**: All SBOM consumers (Policy, Excititor, ExportCenter)
+- **Safe to parallelize with**: Sprints 3600.0003.*, 4200.*, 5200.*
+
+## Documentation Prerequisites
+- CycloneDX 1.7 Specification: https://cyclonedx.org/docs/1.7/
+- `docs/modules/scanner/architecture.md`
+- `docs/modules/sbomservice/architecture.md`
+
+---
+
+## Tasks
+
+### T1: CycloneDX NuGet Package Update
+
+**Assignee**: Scanner Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Update CycloneDX.Core and related packages to versions supporting 1.7.
+
+**Acceptance Criteria**:
+- [ ] Update `CycloneDX.Core` to latest version with 1.7 support
+- [ ] Update `CycloneDX.Json` if separate
+- [ ] Update `CycloneDX.Protobuf` if separate
+- [ ] Verify all dependent projects build
+- [ ] No breaking API changes (or document migration path)
+
+**Package Updates**:
+```xml
+<ItemGroup>
+  <!-- Exact versions TBD: pin to the latest releases with CycloneDX 1.7 schema support -->
+  <PackageReference Include="CycloneDX.Core" Version="TBD" />
+  <PackageReference Include="CycloneDX.Json" Version="TBD" />
+  <PackageReference Include="CycloneDX.Protobuf" Version="TBD" />
+</ItemGroup>
+```
+
+---
+
+### T2: CycloneDxComposer Update
+
+**Assignee**: Scanner Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Update the SBOM composer to emit CycloneDX 1.7 format.
+
+**Implementation Path**: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs`
+
+**Acceptance Criteria**:
+- [ ] Spec version set to "1.7"
+- [ ] Media type updated to `application/vnd.cyclonedx+json; version=1.7`
+- [ ] New 1.7 fields populated where applicable:
+ - [ ] `declarations` for attestations
+ - [ ] `definitions` for standards/requirements
+ - [ ] Enhanced `formulation` for build environment
+ - [ ] `modelCard` for ML components (if applicable)
+ - [ ] `cryptography` properties (if applicable)
+- [ ] Existing fields remain populated correctly
+- [ ] Deterministic output maintained
+
+**Key 1.7 Additions**:
+```csharp
+// CycloneDX 1.7 new features
+public sealed record CycloneDx17Enhancements
+{
+ // Attestations - link to in-toto/DSSE
+    public ImmutableArray<Declaration> Declarations { get; init; }
+
+    // Standards compliance (e.g., NIST, ISO)
+    public ImmutableArray<Definition> Definitions { get; init; }
+
+ // Enhanced formulation for reproducibility
+ public Formulation? Formulation { get; init; }
+
+ // Cryptography bill of materials
+ public CryptographyProperties? Cryptography { get; init; }
+}
+```
+
+---
+
+### T3: SBOM Serialization Updates
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Update JSON and Protobuf serialization for 1.7 schema.
+
+**Acceptance Criteria**:
+- [ ] JSON serialization outputs valid CycloneDX 1.7
+- [ ] Protobuf serialization updated for 1.7 schema
+- [ ] Schema validation against official 1.7 JSON schema
+- [ ] Canonical JSON ordering preserved (determinism)
+- [ ] Empty collections omitted (spec compliance)
+
+---
+
+### T4: SBOM Parsing Backward Compatibility
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Ensure parsers can read both 1.6 and 1.7 CycloneDX documents.
+
+**Implementation Path**: `src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/`
+
+**Acceptance Criteria**:
+- [ ] Parser auto-detects spec version from document
+- [ ] 1.6 documents parsed without errors
+- [ ] 1.7 documents parsed with new fields
+- [ ] Unknown fields in future versions ignored gracefully
+- [ ] Version-specific validation applied
+
+**Parsing Logic**:
+```csharp
+public CycloneDxBom Parse(string json)
+{
+ var specVersion = ExtractSpecVersion(json);
+ return specVersion switch
+ {
+ "1.6" => ParseV16(json),
+ "1.7" => ParseV17(json),
+        _ when specVersion.StartsWith("1.") => ParseV17(json), // forward compat; NOTE(review): pre-1.6 versions (e.g. "1.5", which the Decisions table commits to supporting) also land here — confirm intended
+ _ => throw new UnsupportedSpecVersionException(specVersion)
+ };
+}
+```
+
+---
+
+### T5: VEX Format Updates
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Update VEX document generation to leverage CycloneDX 1.7 improvements.
+
+**Acceptance Criteria**:
+- [ ] VEX documents reference 1.7 spec
+- [ ] Enhanced `vulnerability.ratings` with CVSS 4.0 vectors
+- [ ] `vulnerability.affects[].versions` range expressions
+- [ ] `vulnerability.source` with PURL references
+- [ ] Backward-compatible with 1.6 VEX consumers
+
+---
+
+### T6: Media Type Updates
+
+**Assignee**: Scanner Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Update all media type references throughout the codebase.
+
+**Acceptance Criteria**:
+- [ ] Constants updated: `application/vnd.cyclonedx+json; version=1.7`
+- [ ] OCI artifact type updated for SBOM referrers
+- [ ] Content-Type headers in API responses updated
+- [ ] Accept header handling supports both 1.6 and 1.7
+
+**Media Type Constants**:
+```csharp
+public static class CycloneDxMediaTypes
+{
+ public const string JsonV17 = "application/vnd.cyclonedx+json; version=1.7";
+ public const string JsonV16 = "application/vnd.cyclonedx+json; version=1.6";
+ public const string Json = JsonV17; // Default to latest
+
+ public const string ProtobufV17 = "application/vnd.cyclonedx+protobuf; version=1.7";
+ public const string XmlV17 = "application/vnd.cyclonedx+xml; version=1.7";
+}
+```
+
+---
+
+### T7: Golden Corpus Update
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Update golden test corpus with CycloneDX 1.7 expected outputs.
+
+**Acceptance Criteria**:
+- [ ] Regenerate all golden SBOM files in 1.7 format
+- [ ] Verify determinism: same inputs produce identical outputs
+- [ ] Add 1.7-specific test cases (declarations, formulation)
+- [ ] Retain 1.6 golden files for backward compat testing
+- [ ] CI/CD determinism tests pass
+
+---
+
+### T8: Unit Tests
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Update and expand unit tests for 1.7 support.
+
+**Acceptance Criteria**:
+- [ ] Composer tests for 1.7 output
+- [ ] Parser tests for 1.6 and 1.7 input
+- [ ] Serialization round-trip tests
+- [ ] Schema validation tests
+- [ ] Media type handling tests
+
+---
+
+### T9: Integration Tests
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+End-to-end integration tests with 1.7 SBOMs.
+
+**Acceptance Criteria**:
+- [ ] Full scan → SBOM → Policy evaluation flow
+- [ ] SBOM export to OCI registry as referrer
+- [ ] Cross-module SBOM consumption (Excititor, Policy)
+- [ ] Air-gap bundle with 1.7 SBOMs
+
+---
+
+### T10: Documentation Updates
+
+**Assignee**: Scanner Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Update documentation to reflect 1.7 upgrade.
+
+**Acceptance Criteria**:
+- [ ] Update `docs/modules/scanner/architecture.md` with 1.7 references
+- [ ] Update `docs/modules/sbomservice/architecture.md`
+- [ ] Update API documentation with new media types
+- [ ] Migration guide for 1.6 → 1.7
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Scanner Team | NuGet Package Update |
+| 2 | T2 | TODO | T1 | Scanner Team | CycloneDxComposer Update |
+| 3 | T3 | TODO | T1 | Scanner Team | Serialization Updates |
+| 4 | T4 | TODO | T1 | Scanner Team | Parsing Backward Compatibility |
+| 5 | T5 | TODO | T2 | Scanner Team | VEX Format Updates |
+| 6 | T6 | TODO | T2 | Scanner Team | Media Type Updates |
+| 7 | T7 | TODO | T2-T6 | Scanner Team | Golden Corpus Update |
+| 8 | T8 | TODO | T2-T6 | Scanner Team | Unit Tests |
+| 9 | T9 | TODO | T8 | Scanner Team | Integration Tests |
+| 10 | T10 | TODO | T1-T9 | Scanner Team | Documentation Updates |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from Reference Architecture advisory - upgrading from 1.6 to 1.7. | Agent |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Default to 1.7 | Decision | Scanner Team | New SBOMs default to 1.7; 1.6 available via config |
+| Backward compat | Decision | Scanner Team | Parsers support 1.5, 1.6, 1.7 for ingestion |
+| Protobuf sync | Risk | Scanner Team | Protobuf schema may lag JSON; prioritize JSON |
+| NuGet availability | Risk | Scanner Team | CycloneDX.Core 1.7 support timing unclear |
+
+---
+
+## Success Criteria
+
+- [ ] All SBOM generation outputs valid CycloneDX 1.7
+- [ ] All parsers read 1.6 and 1.7 without errors
+- [ ] Determinism tests pass with 1.7 output
+- [ ] No regression in scan-to-policy flow
+- [ ] Media types correctly reflect 1.7
+
+**Sprint Status**: TODO (0/10 tasks complete)
diff --git a/docs/implplan/SPRINT_3600_0003_0001_spdx_3_0_1_generation.md b/docs/implplan/SPRINT_3600_0003_0001_spdx_3_0_1_generation.md
new file mode 100644
index 000000000..6a212c258
--- /dev/null
+++ b/docs/implplan/SPRINT_3600_0003_0001_spdx_3_0_1_generation.md
@@ -0,0 +1,387 @@
+# Sprint 3600.0003.0001 · SPDX 3.0.1 Native Generation — Full SBOM Format Support
+
+## Topic & Scope
+- Implement native SPDX 3.0.1 SBOM generation capability.
+- Currently only license normalization and import parsing exists; this sprint adds full generation.
+- Provide SPDX 3.0.1 as an alternative output format alongside CycloneDX 1.7.
+- **Working directory:** `src/Scanner/__Libraries/StellaOps.Scanner.Emit/`, `src/SbomService/`
+
+## Dependencies & Concurrency
+- **Upstream**: Sprint 3600.0002.0001 (CycloneDX 1.7 - establishes patterns)
+- **Downstream**: ExportCenter, air-gap bundles, Policy (optional SPDX support)
+- **Safe to parallelize with**: Sprints 4200.*, 5200.*
+
+## Documentation Prerequisites
+- SPDX 3.0.1 Specification: https://spdx.github.io/spdx-spec/v3.0.1/
+- `docs/modules/scanner/architecture.md`
+- Existing: `src/AirGap/StellaOps.AirGap.Importer/Reconciliation/Parsers/SpdxParser.cs`
+
+---
+
+## Tasks
+
+### T1: SPDX 3.0.1 Domain Model
+
+**Assignee**: Scanner Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Create comprehensive C# domain model for SPDX 3.0.1 elements.
+
+**Implementation Path**: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/Spdx/Models/`
+
+**Acceptance Criteria**:
+- [ ] Core classes: `SpdxDocument`, `SpdxElement`, `SpdxRelationship`
+- [ ] Package model: `SpdxPackage` with all 3.0.1 fields
+- [ ] File model: `SpdxFile` with checksums and annotations
+- [ ] Snippet model: `SpdxSnippet` for partial file references
+- [ ] Licensing: `SpdxLicense`, `SpdxLicenseExpression`, `SpdxExtractedLicense`
+- [ ] Security: `SpdxVulnerability`, `SpdxVulnAssessment`
+- [ ] Annotations and relationships per spec
+- [ ] Immutable records with init-only properties
+
+**Core Model**:
+```csharp
+namespace StellaOps.Scanner.Emit.Spdx.Models;
+
+public sealed record SpdxDocument
+{
+ public required string SpdxVersion { get; init; } // "SPDX-3.0.1"
+ public required string DocumentNamespace { get; init; }
+ public required string Name { get; init; }
+ public required SpdxCreationInfo CreationInfo { get; init; }
+ public ImmutableArray<SpdxElement> Elements { get; init; }
+ public ImmutableArray<SpdxRelationship> Relationships { get; init; }
+ public ImmutableArray<SpdxAnnotation> Annotations { get; init; }
+}
+
+public abstract record SpdxElement
+{
+ public required string SpdxId { get; init; }
+ public string? Name { get; init; }
+ public string? Comment { get; init; }
+}
+
+public sealed record SpdxPackage : SpdxElement
+{
+ public string? Version { get; init; }
+ public string? PackageUrl { get; init; } // PURL
+ public string? DownloadLocation { get; init; }
+ public SpdxLicenseExpression? DeclaredLicense { get; init; }
+ public SpdxLicenseExpression? ConcludedLicense { get; init; }
+ public string? CopyrightText { get; init; }
+ public ImmutableArray<SpdxChecksum> Checksums { get; init; }
+ public ImmutableArray<SpdxExternalRef> ExternalRefs { get; init; }
+ public SpdxPackageVerificationCode? VerificationCode { get; init; }
+}
+
+public sealed record SpdxRelationship
+{
+ public required string FromElement { get; init; }
+ public required SpdxRelationshipType Type { get; init; }
+ public required string ToElement { get; init; }
+}
+```
+
+---
+
+### T2: SPDX 3.0.1 Composer
+
+**Assignee**: Scanner Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Implement SBOM composer that generates SPDX 3.0.1 documents from scan results.
+
+**Implementation Path**: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SpdxComposer.cs`
+
+**Acceptance Criteria**:
+- [ ] `ISpdxComposer` interface with `Compose()` method
+- [ ] `SpdxComposer` implementation
+- [ ] Maps internal package model to SPDX packages
+- [ ] Generates DESCRIBES relationships for root packages
+- [ ] Generates DEPENDENCY_OF relationships for dependencies
+- [ ] Populates license expressions from detected licenses
+- [ ] Deterministic SPDX ID generation (content-addressed)
+- [ ] Document namespace follows URI pattern
+
+**Composer Interface**:
+```csharp
+public interface ISpdxComposer
+{
+ SpdxDocument Compose(
+ ScanResult scanResult,
+ SpdxCompositionOptions options,
+ CancellationToken cancellationToken = default);
+
+ ValueTask<SpdxDocument> ComposeAsync(
+ ScanResult scanResult,
+ SpdxCompositionOptions options,
+ CancellationToken cancellationToken = default);
+}
+
+public sealed record SpdxCompositionOptions
+{
+ public string CreatorTool { get; init; } = "StellaOps-Scanner";
+ public string? CreatorOrganization { get; init; }
+ public string NamespaceBase { get; init; } = "https://stellaops.io/spdx";
+ public bool IncludeFiles { get; init; } = false;
+ public bool IncludeSnippets { get; init; } = false;
+ public SpdxLicenseListVersion LicenseListVersion { get; init; } = SpdxLicenseListVersion.V3_21;
+}
+```
+
+---
+
+### T3: SPDX JSON-LD Serialization
+
+**Assignee**: Scanner Team
+**Story Points**: 5
+**Status**: TODO
+
+**Description**:
+Implement JSON-LD serialization per SPDX 3.0.1 specification.
+
+**Acceptance Criteria**:
+- [ ] JSON-LD output with proper @context
+- [ ] @type annotations for all elements
+- [ ] @id for element references
+- [ ] Canonical JSON ordering (deterministic)
+- [ ] Schema validation against official SPDX 3.0.1 JSON schema
+- [ ] Compact JSON-LD form (not expanded)
+
+**JSON-LD Output Example**:
+```json
+{
+ "@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
+ "@type": "SpdxDocument",
+ "spdxVersion": "SPDX-3.0.1",
+ "name": "SBOM for container:sha256:abc123",
+ "documentNamespace": "https://stellaops.io/spdx/container/sha256:abc123",
+ "creationInfo": {
+ "@type": "CreationInfo",
+ "created": "2025-12-21T10:00:00Z",
+ "createdBy": ["Tool: StellaOps-Scanner-1.0.0"]
+ },
+ "rootElement": ["SPDXRef-Package-root"],
+ "element": [
+ {
+ "@type": "Package",
+ "@id": "SPDXRef-Package-root",
+ "name": "myapp",
+ "packageVersion": "1.0.0",
+ "packageUrl": "pkg:oci/myapp@sha256:abc123"
+ }
+ ]
+}
+```
+
+---
+
+### T4: SPDX Tag-Value Serialization (Optional)
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Implement legacy tag-value format for backward compatibility.
+
+**Acceptance Criteria**:
+- [ ] Tag-value output matching SPDX 2.3 format
+- [ ] Deterministic field ordering
+- [ ] Proper escaping of multi-line text
+- [ ] Relationship serialization
+- [ ] Can be disabled via configuration
+
+**Tag-Value Example**:
+```
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: SBOM for container:sha256:abc123
+DocumentNamespace: https://stellaops.io/spdx/container/sha256:abc123
+
+PackageName: myapp
+SPDXID: SPDXRef-Package-root
+PackageVersion: 1.0.0
+PackageDownloadLocation: NOASSERTION
+```
+
+---
+
+### T5: License Expression Handling
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Implement SPDX license expression parsing and generation.
+
+**Acceptance Criteria**:
+- [ ] Parse SPDX license expressions (AND, OR, WITH)
+- [ ] Generate valid license expressions
+- [ ] Handle LicenseRef- for custom licenses
+- [ ] Validate against SPDX license list
+- [ ] Support SPDX license list version 3.21
+
+**License Expression Model**:
+```csharp
+public abstract record SpdxLicenseExpression;
+
+public sealed record SpdxSimpleLicense(string LicenseId) : SpdxLicenseExpression;
+
+public sealed record SpdxConjunctiveLicense(
+ SpdxLicenseExpression Left,
+ SpdxLicenseExpression Right) : SpdxLicenseExpression; // AND
+
+public sealed record SpdxDisjunctiveLicense(
+ SpdxLicenseExpression Left,
+ SpdxLicenseExpression Right) : SpdxLicenseExpression; // OR
+
+public sealed record SpdxWithException(
+ SpdxLicenseExpression License,
+ string Exception) : SpdxLicenseExpression;
+```
+
+---
+
+### T6: SPDX-CycloneDX Conversion
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Implement bidirectional conversion between SPDX and CycloneDX.
+
+**Acceptance Criteria**:
+- [ ] CycloneDX → SPDX conversion
+- [ ] SPDX → CycloneDX conversion
+- [ ] Preserve all common fields
+- [ ] Handle format-specific fields gracefully
+- [ ] Conversion loss documented
+
+---
+
+### T7: SBOM Service Integration
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Integrate SPDX generation into SBOM service endpoints.
+
+**Implementation Path**: `src/SbomService/`
+
+**Acceptance Criteria**:
+- [ ] `Accept: application/spdx+json` returns SPDX 3.0.1
+- [ ] `Accept: text/spdx` returns tag-value format
+- [ ] Query parameter `?format=spdx` as alternative
+- [ ] Default remains CycloneDX 1.7
+- [ ] Caching works for both formats
+
+---
+
+### T8: OCI Artifact Type Registration
+
+**Assignee**: Scanner Team
+**Story Points**: 2
+**Status**: TODO
+
+**Description**:
+Register SPDX SBOMs as OCI referrers with proper artifact type.
+
+**Acceptance Criteria**:
+- [ ] Artifact type: `application/spdx+json`
+- [ ] Push to registry alongside CycloneDX
+- [ ] Configurable: push one or both formats
+- [ ] Referrer index lists both when available
+
+---
+
+### T9: Unit Tests
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+Comprehensive unit tests for SPDX generation.
+
+**Acceptance Criteria**:
+- [ ] Model construction tests
+- [ ] Composer tests for various scan results
+- [ ] JSON-LD serialization tests
+- [ ] Tag-value serialization tests
+- [ ] License expression tests
+- [ ] Conversion tests
+
+---
+
+### T10: Integration Tests & Golden Corpus
+
+**Assignee**: Scanner Team
+**Story Points**: 3
+**Status**: TODO
+
+**Description**:
+End-to-end tests and golden file corpus for SPDX.
+
+**Acceptance Criteria**:
+- [ ] Full scan → SPDX flow
+- [ ] Golden SPDX files for determinism testing
+- [ ] SPDX validation against official tooling
+- [ ] Air-gap bundle with SPDX SBOMs
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Scanner Team | SPDX 3.0.1 Domain Model |
+| 2 | T2 | TODO | T1 | Scanner Team | SPDX 3.0.1 Composer |
+| 3 | T3 | TODO | T1 | Scanner Team | JSON-LD Serialization |
+| 4 | T4 | TODO | T1 | Scanner Team | Tag-Value Serialization |
+| 5 | T5 | TODO | — | Scanner Team | License Expression Handling |
+| 6 | T6 | TODO | T1, T3 | Scanner Team | SPDX-CycloneDX Conversion |
+| 7 | T7 | TODO | T2, T3 | Scanner Team | SBOM Service Integration |
+| 8 | T8 | TODO | T7 | Scanner Team | OCI Artifact Type Registration |
+| 9 | T9 | TODO | T1-T6 | Scanner Team | Unit Tests |
+| 10 | T10 | TODO | T7-T8 | Scanner Team | Integration Tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from Reference Architecture advisory - adding SPDX 3.0.1 generation. | Agent |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| JSON-LD primary | Decision | Scanner Team | JSON-LD is primary format; tag-value for legacy |
+| CycloneDX default | Decision | Scanner Team | CycloneDX remains default; SPDX opt-in |
+| SPDX 3.0.1 only | Decision | Scanner Team | No support for SPDX 2.x generation (only parsing) |
+| License list sync | Risk | Scanner Team | SPDX license list updates may require periodic sync |
+
+---
+
+## Success Criteria
+
+- [ ] Valid SPDX 3.0.1 JSON-LD output from scans
+- [ ] Passes official SPDX validation tools
+- [ ] Deterministic output (same input = same output)
+- [ ] Can export both CycloneDX and SPDX for same scan
+- [ ] Documentation complete
+
+**Sprint Status**: TODO (0/10 tasks complete)
diff --git a/docs/implplan/SPRINT_3600_SUMMARY.md b/docs/implplan/SPRINT_3600_SUMMARY.md
new file mode 100644
index 000000000..4d817991d
--- /dev/null
+++ b/docs/implplan/SPRINT_3600_SUMMARY.md
@@ -0,0 +1,87 @@
+# Sprint Series 3600 · Reference Architecture Gap Closure
+
+## Overview
+
+This sprint series addresses gaps identified from the **20-Dec-2025 Reference Architecture Advisory** analysis. These sprints complete the implementation of the Stella Ops reference architecture vision.
+
+## Sprint Index
+
+| Sprint | Title | Priority | Status | Dependencies |
+|--------|-------|----------|--------|--------------|
+| 3600.0001.0001 | Gateway WebService | HIGH | TODO | Router infrastructure (complete) |
+| 3600.0002.0001 | CycloneDX 1.7 Upgrade | HIGH | TODO | None |
+| 3600.0003.0001 | SPDX 3.0.1 Generation | MEDIUM | TODO | 3600.0002.0001 |
+
+## Related Sprints (Other Series)
+
+| Sprint | Title | Priority | Status | Series |
+|--------|-------|----------|--------|--------|
+| 4200.0001.0001 | Proof Chain Verification UI | HIGH | TODO | 4200 (UI) |
+| 5200.0001.0001 | Starter Policy Template | HIGH | TODO | 5200 (Docs) |
+
+## Gap Analysis Source
+
+**Advisory**: `docs/product-advisories/archived/2025-12-21-reference-architecture/20-Dec-2025 - Stella Ops Reference Architecture.md`
+
+### Gaps Addressed
+
+| Gap | Sprint | Description |
+|-----|--------|-------------|
+| Gateway WebService Missing | 3600.0001.0001 | HTTP ingress service not implemented |
+| CycloneDX 1.6 → 1.7 | 3600.0002.0001 | Upgrade to latest CycloneDX spec |
+| SPDX 3.0.1 Generation | 3600.0003.0001 | Native SPDX SBOM generation |
+| Proof Chain UI | 4200.0001.0001 | Evidence transparency dashboard |
+| Starter Policy | 5200.0001.0001 | Day-1 policy pack for onboarding |
+
+### Already Implemented (No Action Required)
+
+| Component | Status | Notes |
+|-----------|--------|-------|
+| Scheduler | Complete | Full implementation with PostgreSQL, Redis |
+| Policy Engine | Complete | Signed verdicts, deterministic IR, exceptions |
+| Authority | Complete | DPoP/mTLS, OpToks, JWKS rotation |
+| Attestor | Complete | DSSE/in-toto, Rekor v2, proof chains |
+| Timeline/Notify | Complete | TimelineIndexer + Notify with 4 channels |
+| Excititor | Complete | VEX ingestion, CycloneDX, OpenVEX |
+| Concelier | Complete | 31+ connectors, Link-Not-Merge |
+| Reachability/Signals | Complete | 5-factor scoring, lattice logic |
+| OCI Referrers | Complete | ExportCenter + Excititor |
+| Tenant Isolation | Complete | RLS, per-tenant keys, namespaces |
+
+## Execution Order
+
+```mermaid
+graph LR
+    A[3600.0002.0001<br/>CycloneDX 1.7] --> B[3600.0003.0001<br/>SPDX 3.0.1]
+    C[3600.0001.0001<br/>Gateway WebService] --> D[Production Ready]
+    B --> D
+    E[4200.0001.0001<br/>Proof Chain UI] --> D
+    F[5200.0001.0001<br/>Starter Policy] --> D
+```
+
+## Success Criteria for Series
+
+- [ ] Gateway WebService accepts HTTP and routes to microservices
+- [ ] All SBOMs generated in CycloneDX 1.7 format
+- [ ] SPDX 3.0.1 available as alternative SBOM format
+- [ ] Auditors can view complete evidence chains in UI
+- [ ] New customers can deploy starter policy in <5 minutes
+
+## Created
+
+- **Date**: 2025-12-21
+- **Source**: Reference Architecture Advisory Gap Analysis
+- **Author**: Agent
+
+---
+
+## Sprint Status Summary
+
+| Sprint | Tasks | Completed | Status |
+|--------|-------|-----------|--------|
+| 3600.0001.0001 | 10 | 0 | TODO |
+| 3600.0002.0001 | 10 | 0 | TODO |
+| 3600.0003.0001 | 10 | 0 | TODO |
+| 4200.0001.0001 | 11 | 0 | TODO |
+| 5200.0001.0001 | 10 | 0 | TODO |
+| **Total** | **51** | **0** | **TODO** |
diff --git a/docs/implplan/SPRINT_4000_0001_0001_unknowns_decay_algorithm.md b/docs/implplan/SPRINT_4000_0001_0001_unknowns_decay_algorithm.md
new file mode 100644
index 000000000..b31471220
--- /dev/null
+++ b/docs/implplan/SPRINT_4000_0001_0001_unknowns_decay_algorithm.md
@@ -0,0 +1,384 @@
+# Sprint 4000.0001.0001 · Unknowns Decay Algorithm
+
+## Topic & Scope
+
+- Add time-based decay factor to the UnknownRanker scoring algorithm
+- Implements bucket-based freshness decay following existing `FreshnessModels` pattern
+- Ensures older unknowns gradually reduce in priority unless re-evaluated
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy.Unknowns/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: None (first sprint in batch)
+- **Downstream**: Sprint 4000.0001.0002 (BlastRadius/Containment)
+- **Safe to parallelize with**: Sprint 4000.0002.0001 (EPSS Connector)
+
+## Documentation Prerequisites
+
+- `src/Policy/__Libraries/StellaOps.Policy.Unknowns/AGENTS.md`
+- `src/Policy/__Libraries/StellaOps.Policy/Scoring/FreshnessModels.cs` (pattern reference)
+- `docs/product-advisories/14-Dec-2025 - Triage and Unknowns Technical Reference.md`
+
+---
+
+## Tasks
+
+### T1: Extend UnknownRankInput with Timestamps
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Add timestamp fields to `UnknownRankInput` record to support decay calculation.
+
+**Implementation Path**: `Services/UnknownRanker.cs` (lines 16-23)
+
+**Changes**:
+```csharp
+public sealed record UnknownRankInput(
+ bool HasVexStatement,
+ bool HasReachabilityData,
+ bool HasConflictingSources,
+ bool IsStaleAdvisory,
+ bool IsInKev,
+ decimal EpssScore,
+ decimal CvssScore,
+ // NEW: Time-based decay inputs
+ DateTimeOffset? FirstSeenAt,
+ DateTimeOffset? LastEvaluatedAt,
+ DateTimeOffset AsOfDateTime);
+```
+
+**Acceptance Criteria**:
+- [ ] `FirstSeenAt` nullable timestamp added (when unknown first detected)
+- [ ] `LastEvaluatedAt` nullable timestamp added (last ranking recalculation)
+- [ ] `AsOfDateTime` required timestamp added (reference time for decay)
+- [ ] Backward compatible: existing callers can pass null for new optional fields
+- [ ] All existing tests still pass
+
+---
+
+### T2: Implement DecayCalculator
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement bucket-based decay calculation following the `FreshnessModels` pattern in `StellaOps.Policy.Scoring`.
+
+**Implementation Path**: `Services/UnknownRanker.cs`
+
+**Decay Buckets** (from FreshnessModels pattern):
+```csharp
+/// <summary>
+/// Computes decay factor based on days since last evaluation.
+/// Returns 1.0 for fresh, decreasing to 0.2 for very old.
+/// </summary>
+private static decimal ComputeDecayFactor(UnknownRankInput input)
+{
+ if (input.LastEvaluatedAt is null)
+ return 1.0m; // No history = no decay
+
+ var ageDays = (int)(input.AsOfDateTime - input.LastEvaluatedAt.Value).TotalDays;
+
+ return ageDays switch
+ {
+ <= 7 => 1.00m, // Fresh (7d): 100%
+ <= 30 => 0.90m, // 30d: 90%
+ <= 90 => 0.75m, // 90d: 75%
+ <= 180 => 0.60m, // 180d: 60%
+ <= 365 => 0.40m, // 365d: 40%
+ _ => 0.20m // >365d: 20%
+ };
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ComputeDecayFactor` method implemented with bucket logic
+- [ ] Returns `1.0m` when `LastEvaluatedAt` is null (no decay)
+- [ ] All arithmetic uses `decimal` for determinism
+- [ ] Buckets match FreshnessModels pattern (7/30/90/180/365 days)
+
+---
+
+### T3: Extend UnknownRankerOptions
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T2
+
+**Description**:
+Add decay configuration options to allow customization of decay behavior.
+
+**Implementation Path**: `Services/UnknownRanker.cs` (lines 162-172)
+
+**Changes**:
+```csharp
+public sealed class UnknownRankerOptions
+{
+ // Existing band thresholds
+ public decimal HotThreshold { get; set; } = 75m;
+ public decimal WarmThreshold { get; set; } = 50m;
+ public decimal ColdThreshold { get; set; } = 25m;
+
+ // NEW: Decay configuration
+ public bool EnableDecay { get; set; } = true;
+ public IReadOnlyList<DecayBucket> DecayBuckets { get; set; } = DefaultDecayBuckets;
+
+ public static IReadOnlyList<DecayBucket> DefaultDecayBuckets { get; } =
+ [
+ new DecayBucket(7, 10000), // 7d: 100%
+ new DecayBucket(30, 9000), // 30d: 90%
+ new DecayBucket(90, 7500), // 90d: 75%
+ new DecayBucket(180, 6000), // 180d: 60%
+ new DecayBucket(365, 4000), // 365d: 40%
+ new DecayBucket(int.MaxValue, 2000) // >365d: 20%
+ ];
+}
+
+public sealed record DecayBucket(int MaxAgeDays, int MultiplierBps);
+```
+
+**Acceptance Criteria**:
+- [ ] `EnableDecay` toggle added (default: true)
+- [ ] `DecayBuckets` configurable list added
+- [ ] Uses basis points (10000 = 100%) for integer math
+- [ ] Default buckets match T2 implementation
+- [ ] DI configuration via `services.Configure<UnknownRankerOptions>()` works
+
+---
+
+### T4: Integrate Decay into Rank()
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Apply decay factor to the final score calculation in the `Rank()` method.
+
+**Implementation Path**: `Services/UnknownRanker.cs` (lines 87-95)
+
+**Updated Rank Method**:
+```csharp
+public UnknownRankResult Rank(UnknownRankInput input)
+{
+ var uncertainty = ComputeUncertainty(input);
+ var pressure = ComputeExploitPressure(input);
+ var rawScore = Math.Round((uncertainty * 50m) + (pressure * 50m), 2);
+
+ // Apply decay factor if enabled
+ decimal decayFactor = 1.0m;
+ if (_options.EnableDecay)
+ {
+ decayFactor = ComputeDecayFactor(input);
+ }
+
+ var score = Math.Round(rawScore * decayFactor, 2);
+ var band = AssignBand(score);
+
+ return new UnknownRankResult(score, uncertainty, pressure, band, decayFactor);
+}
+```
+
+**Updated Result Record**:
+```csharp
+public sealed record UnknownRankResult(
+ decimal Score,
+ decimal UncertaintyFactor,
+ decimal ExploitPressure,
+ UnknownBand Band,
+ decimal DecayFactor = 1.0m); // NEW field
+```
+
+**Acceptance Criteria**:
+- [ ] Decay factor applied as multiplier to raw score
+- [ ] `DecayFactor` added to `UnknownRankResult`
+- [ ] Score still rounded to 2 decimal places
+- [ ] Band assignment uses decayed score
+- [ ] When `EnableDecay = false`, decay factor is 1.0
+
+---
+
+### T5: Add Decay Tests
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T4
+
+**Description**:
+Add comprehensive tests for decay calculation covering all buckets and edge cases.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Unknowns.Tests/Services/UnknownRankerTests.cs`
+
+**Test Cases**:
+```csharp
+#region Decay Factor Tests
+
+[Fact]
+public void ComputeDecay_NullLastEvaluated_Returns100Percent()
+{
+ var input = CreateInputWithAge(lastEvaluatedAt: null);
+ var result = _ranker.Rank(input);
+ result.DecayFactor.Should().Be(1.00m);
+}
+
+[Theory]
+[InlineData(0, 1.00)] // Today
+[InlineData(7, 1.00)] // 7 days
+[InlineData(8, 0.90)] // 8 days (next bucket)
+[InlineData(30, 0.90)] // 30 days
+[InlineData(31, 0.75)] // 31 days
+[InlineData(90, 0.75)] // 90 days
+[InlineData(91, 0.60)] // 91 days
+[InlineData(180, 0.60)] // 180 days
+[InlineData(181, 0.40)] // 181 days
+[InlineData(365, 0.40)] // 365 days
+[InlineData(366, 0.20)] // 366 days
+[InlineData(1000, 0.20)] // Very old
+public void ComputeDecay_AgeBuckets_ReturnsCorrectMultiplier(int ageDays, decimal expected)
+{
+ var asOf = DateTimeOffset.UtcNow;
+ var input = CreateInputWithAge(
+ lastEvaluatedAt: asOf.AddDays(-ageDays),
+ asOfDateTime: asOf);
+
+ var result = _ranker.Rank(input);
+ result.DecayFactor.Should().Be(expected);
+}
+
+[Fact]
+public void Rank_WithDecay_AppliesMultiplierToScore()
+{
+ // Arrange: Create input that would score 50 without decay
+ var input = CreateHighScoreInput(ageDays: 100); // 75% decay
+
+ // Act
+ var result = _ranker.Rank(input);
+
+ // Assert: Score should be 50 * 0.75 = 37.50
+ result.Score.Should().Be(37.50m);
+ result.DecayFactor.Should().Be(0.75m);
+}
+
+[Fact]
+public void Rank_DecayDisabled_ReturnsFullScore()
+{
+ // Arrange
+ var options = new UnknownRankerOptions { EnableDecay = false };
+ var ranker = new UnknownRanker(Options.Create(options));
+ var input = CreateHighScoreInput(ageDays: 100);
+
+ // Act
+ var result = ranker.Rank(input);
+
+ // Assert
+ result.DecayFactor.Should().Be(1.0m);
+}
+
+[Fact]
+public void Rank_Determinism_SameInputSameOutput()
+{
+ var input = CreateInputWithAge(ageDays: 45);
+
+ var results = Enumerable.Range(0, 100)
+ .Select(_ => _ranker.Rank(input))
+ .ToList();
+
+ results.Should().AllBeEquivalentTo(results[0]);
+}
+
+#endregion
+```
+
+**Acceptance Criteria**:
+- [ ] Test for null `LastEvaluatedAt` returns 1.0
+- [ ] Theory test covers all bucket boundaries (0, 7, 8, 30, 31, 90, 91, 180, 181, 365, 366)
+- [ ] Test verifies decay multiplier applied to score
+- [ ] Test verifies `EnableDecay = false` bypasses decay
+- [ ] Determinism test confirms reproducibility
+- [ ] All 6+ new tests pass
+
+---
+
+### T6: Update UnknownsRepository
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Ensure repository queries populate `first_seen_at` and `last_evaluated_at` columns.
+
+**Implementation Path**: `Repositories/UnknownsRepository.cs`
+
+**SQL Updates**:
+```sql
+-- Verify columns exist in policy.unknowns table
+-- first_seen_at should already exist per schema
+-- last_evaluated_at needs to be updated on each ranking
+
+UPDATE policy.unknowns
+SET last_evaluated_at = @now,
+ score = @score,
+ band = @band,
+ uncertainty_factor = @uncertainty,
+ exploit_pressure = @pressure
+WHERE id = @id AND tenant_id = @tenantId;
+```
+
+**Acceptance Criteria**:
+- [ ] `first_seen_at` column is set on INSERT (if not already)
+- [ ] `last_evaluated_at` column updated on every re-ranking
+- [ ] Repository methods return timestamps for decay calculation
+- [ ] RLS (tenant isolation) still enforced
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Extend UnknownRankInput with timestamps |
+| 2 | T2 | TODO | T1 | Policy Team | Implement DecayCalculator |
+| 3 | T3 | TODO | T2 | Policy Team | Extend UnknownRankerOptions |
+| 4 | T4 | TODO | T2, T3 | Policy Team | Integrate decay into Rank() |
+| 5 | T5 | TODO | T4 | Policy Team | Add decay tests |
+| 6 | T6 | TODO | T1 | Policy Team | Update UnknownsRepository |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT gap analysis. Decay logic identified as gap in Triage & Unknowns advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Decay as multiplier vs deduction | Decision | Policy Team | Using multiplier (score × decay) preserves relative ordering |
+| Bucket boundaries | Decision | Policy Team | Following FreshnessModels pattern (7/30/90/180/365 days) |
+| Nullable timestamps | Decision | Policy Team | Allow null for backward compatibility; null = no decay |
+
+---
+
+## Success Criteria
+
+- [ ] All 6 tasks marked DONE
+- [ ] 6+ decay-related tests passing
+- [ ] Existing 29 tests still passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds for `StellaOps.Policy.Unknowns.Tests`
diff --git a/docs/implplan/SPRINT_4000_0001_0002_unknowns_blast_radius_containment.md b/docs/implplan/SPRINT_4000_0001_0002_unknowns_blast_radius_containment.md
new file mode 100644
index 000000000..c862f16d1
--- /dev/null
+++ b/docs/implplan/SPRINT_4000_0001_0002_unknowns_blast_radius_containment.md
@@ -0,0 +1,500 @@
+# Sprint 4000.0001.0002 · Unknowns BlastRadius & Containment Signals
+
+## Topic & Scope
+
+- Add BlastRadius scoring (dependency graph impact) to UnknownRanker
+- Add ContainmentSignals scoring (runtime isolation posture) to UnknownRanker
+- Extend the ranking formula with a containment reduction factor
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy.Unknowns/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4000.0001.0001 (Decay Algorithm) — MUST BE DONE
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4000.0002.0001 (EPSS Connector)
+
+## Documentation Prerequisites
+
+- Sprint 4000.0001.0001 completion
+- `src/Policy/__Libraries/StellaOps.Policy.Unknowns/AGENTS.md`
+- `docs/product-advisories/14-Dec-2025 - Triage and Unknowns Technical Reference.md`
+
+---
+
+## Tasks
+
+### T1: Define BlastRadius Model
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create a new model for blast radius data representing dependency graph impact.
+
+**Implementation Path**: `Models/BlastRadius.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Models;
+
+/// <summary>
+/// Represents the dependency graph impact of an unknown package.
+/// Data sourced from Scanner/Signals module call graph analysis.
+/// </summary>
+public sealed record BlastRadius
+{
+ ///
+ /// Number of packages that directly or transitively depend on this package.
+ /// 0 = isolated, higher = more impact if exploited.
+ ///
+ public int Dependents { get; init; }
+
+ ///
+ /// Whether this package is reachable from network-facing entrypoints.
+ /// True = higher risk, False = reduced risk.
+ ///
+ public bool NetFacing { get; init; }
+
+ ///
+ /// Privilege level under which this package typically runs.
+ /// "root" = highest risk, "user" = normal, "none" = lowest.
+ ///
+ public string? Privilege { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `BlastRadius.cs` file created in `Models/` directory
+- [ ] Record is immutable with init-only properties
+- [ ] XML documentation describes each property
+- [ ] Namespace is `StellaOps.Policy.Unknowns.Models`
+
+---
+
+### T2: Define ContainmentSignals Model
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create a new model for runtime containment posture signals.
+
+**Implementation Path**: `Models/ContainmentSignals.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Models;
+
+/// <summary>
+/// Represents runtime isolation and containment posture signals.
+/// Data sourced from runtime probes (Seccomp, eBPF, container config).
+/// </summary>
+public sealed record ContainmentSignals
+{
+ ///
+ /// Seccomp profile status: "enforced", "permissive", "disabled", null if unknown.
+ /// "enforced" = reduced risk (limits syscalls).
+ ///
+ public string? Seccomp { get; init; }
+
+ ///
+ /// Filesystem mount mode: "ro" (read-only), "rw" (read-write), null if unknown.
+ /// "ro" = reduced risk (limits persistence).
+ ///
+ public string? FileSystem { get; init; }
+
+ ///
+ /// Network policy status: "isolated", "restricted", "open", null if unknown.
+ /// "isolated" = reduced risk (no egress).
+ ///
+ public string? NetworkPolicy { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ContainmentSignals.cs` file created in `Models/` directory
+- [ ] Record is immutable with init-only properties
+- [ ] All properties nullable (unknown state allowed)
+- [ ] XML documentation describes each property
+
+---
+
+### T3: Extend UnknownRankInput
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Add blast radius and containment fields to `UnknownRankInput`.
+
+**Implementation Path**: `Services/UnknownRanker.cs`
+
+**Updated Record**:
+```csharp
+public sealed record UnknownRankInput(
+ // Existing fields
+ bool HasVexStatement,
+ bool HasReachabilityData,
+ bool HasConflictingSources,
+ bool IsStaleAdvisory,
+ bool IsInKev,
+ decimal EpssScore,
+ decimal CvssScore,
+ // From Sprint 4000.0001.0001 (Decay)
+ DateTimeOffset? FirstSeenAt,
+ DateTimeOffset? LastEvaluatedAt,
+ DateTimeOffset AsOfDateTime,
+ // NEW: BlastRadius & Containment
+ BlastRadius? BlastRadius,
+ ContainmentSignals? Containment);
+```
+
+**Acceptance Criteria**:
+- [ ] `BlastRadius` nullable field added
+- [ ] `Containment` nullable field added
+- [ ] Both fields are nullable so callers may pass null (backward compatible)
+- [ ] Existing tests still pass with null values
+
+---
+
+### T4: Implement ComputeContainmentReduction
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3
+
+**Description**:
+Implement containment-based score reduction logic.
+
+**Implementation Path**: `Services/UnknownRanker.cs`
+
+**Reduction Formula**:
+```csharp
+/// <summary>
+/// Computes a reduction factor based on containment posture.
+/// Better containment = lower effective risk = score reduction.
+/// Maximum reduction capped at 40%.
+/// </summary>
+private decimal ComputeContainmentReduction(UnknownRankInput input)
+{
+ decimal reduction = 0m;
+
+ // BlastRadius reductions
+ if (input.BlastRadius is { } blast)
+ {
+ // Isolated package (no dependents) reduces risk
+ if (blast.Dependents == 0)
+ reduction += _options.IsolatedReduction; // default: 0.15
+
+ // Not network-facing reduces risk
+ if (!blast.NetFacing)
+ reduction += _options.NotNetFacingReduction; // default: 0.05
+
+ // Non-root privilege reduces risk
+ if (blast.Privilege is "user" or "none")
+ reduction += _options.NonRootReduction; // default: 0.05
+ }
+
+ // ContainmentSignals reductions
+ if (input.Containment is { } contain)
+ {
+ // Enforced Seccomp reduces risk
+ if (contain.Seccomp == "enforced")
+ reduction += _options.SeccompEnforcedReduction; // default: 0.10
+
+ // Read-only filesystem reduces risk
+ if (contain.FileSystem == "ro")
+ reduction += _options.FsReadOnlyReduction; // default: 0.10
+
+ // Network isolation reduces risk
+ if (contain.NetworkPolicy == "isolated")
+ reduction += _options.NetworkIsolatedReduction; // default: 0.05
+ }
+
+ // Cap at maximum reduction
+ return Math.Min(reduction, _options.MaxContainmentReduction); // default: 0.40
+}
+```
+
+**Score Application**:
+```csharp
+// In Rank() method, after decay:
+var containmentReduction = ComputeContainmentReduction(input);
+var finalScore = Math.Max(0m, decayedScore * (1m - containmentReduction));
+```
+
+**Acceptance Criteria**:
+- [ ] Method computes reduction from BlastRadius and ContainmentSignals
+- [ ] Null inputs contribute 0 reduction
+- [ ] Reduction capped at configurable maximum (default 40%)
+- [ ] All arithmetic uses `decimal`
+- [ ] Reduction applied as multiplier: `score * (1 - reduction)`
+
+---
+
+### T5: Extend UnknownRankerOptions
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T4
+
+**Description**:
+Add containment reduction weight configuration.
+
+**Implementation Path**: `Services/UnknownRanker.cs`
+
+**Updated Options**:
+```csharp
+public sealed class UnknownRankerOptions
+{
+ // Existing band thresholds
+ public decimal HotThreshold { get; set; } = 75m;
+ public decimal WarmThreshold { get; set; } = 50m;
+ public decimal ColdThreshold { get; set; } = 25m;
+
+ // Decay (from Sprint 4000.0001.0001)
+ public bool EnableDecay { get; set; } = true;
+ public IReadOnlyList<DecayBucket> DecayBuckets { get; set; } = DefaultDecayBuckets;
+
+ // NEW: Containment reduction weights
+ public bool EnableContainmentReduction { get; set; } = true;
+ public decimal IsolatedReduction { get; set; } = 0.15m;
+ public decimal NotNetFacingReduction { get; set; } = 0.05m;
+ public decimal NonRootReduction { get; set; } = 0.05m;
+ public decimal SeccompEnforcedReduction { get; set; } = 0.10m;
+ public decimal FsReadOnlyReduction { get; set; } = 0.10m;
+ public decimal NetworkIsolatedReduction { get; set; } = 0.05m;
+ public decimal MaxContainmentReduction { get; set; } = 0.40m;
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `EnableContainmentReduction` toggle added
+- [ ] Individual reduction weights configurable
+- [ ] `MaxContainmentReduction` cap configurable
+- [ ] Defaults match T4 implementation
+- [ ] DI configuration works
+
+---
+
+### T6: Add DB Migration
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Add columns to `policy.unknowns` table for blast radius and containment data.
+
+**Implementation Path**: `src/Policy/__Libraries/StellaOps.Policy.Storage.Postgres/migrations/`
+
+**Migration SQL**:
+```sql
+-- Migration: Add blast radius and containment columns to policy.unknowns
+
+ALTER TABLE policy.unknowns
+ADD COLUMN IF NOT EXISTS blast_radius_dependents INT,
+ADD COLUMN IF NOT EXISTS blast_radius_net_facing BOOLEAN,
+ADD COLUMN IF NOT EXISTS blast_radius_privilege TEXT,
+ADD COLUMN IF NOT EXISTS containment_seccomp TEXT,
+ADD COLUMN IF NOT EXISTS containment_fs_mode TEXT,
+ADD COLUMN IF NOT EXISTS containment_network_policy TEXT;
+
+COMMENT ON COLUMN policy.unknowns.blast_radius_dependents IS 'Number of packages depending on this package';
+COMMENT ON COLUMN policy.unknowns.blast_radius_net_facing IS 'Whether reachable from network entrypoints';
+COMMENT ON COLUMN policy.unknowns.blast_radius_privilege IS 'Privilege level: root, user, none';
+COMMENT ON COLUMN policy.unknowns.containment_seccomp IS 'Seccomp status: enforced, permissive, disabled';
+COMMENT ON COLUMN policy.unknowns.containment_fs_mode IS 'Filesystem mode: ro, rw';
+COMMENT ON COLUMN policy.unknowns.containment_network_policy IS 'Network policy: isolated, restricted, open';
+```
+
+**Acceptance Criteria**:
+- [ ] Migration file created with sequential number
+- [ ] All 6 columns added with appropriate types
+- [ ] Column comments added for documentation
+- [ ] Migration is idempotent (IF NOT EXISTS)
+- [ ] RLS policies still apply
+
+---
+
+### T7: Add Containment Tests
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T4, T5
+
+**Description**:
+Add comprehensive tests for containment reduction logic.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Unknowns.Tests/Services/UnknownRankerTests.cs`
+
+**Test Cases**:
+```csharp
+#region Containment Reduction Tests
+
+[Fact]
+public void ComputeContainmentReduction_NullInputs_ReturnsZero()
+{
+ var input = CreateInputWithContainment(blastRadius: null, containment: null);
+ var result = _ranker.Rank(input);
+ result.ContainmentReduction.Should().Be(0m);
+}
+
+[Fact]
+public void ComputeContainmentReduction_IsolatedPackage_Returns15Percent()
+{
+ var blast = new BlastRadius { Dependents = 0, NetFacing = true };
+ var input = CreateInputWithContainment(blastRadius: blast);
+
+ var result = _ranker.Rank(input);
+ result.ContainmentReduction.Should().Be(0.15m);
+}
+
+[Fact]
+public void ComputeContainmentReduction_AllContainmentFactors_CapsAt40Percent()
+{
+ var blast = new BlastRadius { Dependents = 0, NetFacing = false, Privilege = "none" };
+ var contain = new ContainmentSignals { Seccomp = "enforced", FileSystem = "ro", NetworkPolicy = "isolated" };
+ var input = CreateInputWithContainment(blastRadius: blast, containment: contain);
+
+ // Total would be: 0.15 + 0.05 + 0.05 + 0.10 + 0.10 + 0.05 = 0.50
+ // But capped at 0.40
+ var result = _ranker.Rank(input);
+ result.ContainmentReduction.Should().Be(0.40m);
+}
+
+[Fact]
+public void Rank_WithContainment_AppliesReductionToScore()
+{
+ // Arrange: Create input that would score 60 before containment
+ var blast = new BlastRadius { Dependents = 0 }; // 15% reduction
+ var input = CreateHighScoreInputWithContainment(blast);
+
+ // Act
+ var result = _ranker.Rank(input);
+
+ // Assert: Score reduced by 15%: 60 * 0.85 = 51
+ result.Score.Should().Be(51.00m);
+}
+
+[Fact]
+public void Rank_ContainmentDisabled_NoReduction()
+{
+ var options = new UnknownRankerOptions { EnableContainmentReduction = false };
+ var ranker = new UnknownRanker(Options.Create(options));
+ var blast = new BlastRadius { Dependents = 0 };
+ var input = CreateHighScoreInputWithContainment(blast);
+
+ var result = ranker.Rank(input);
+ result.ContainmentReduction.Should().Be(0m);
+}
+
+#endregion
+```
+
+**Acceptance Criteria**:
+- [ ] Test for null BlastRadius/Containment returns 0 reduction
+- [ ] Test for isolated package (Dependents=0)
+- [ ] Test for cap at 40% maximum
+- [ ] Test verifies reduction applied to final score
+- [ ] Test for `EnableContainmentReduction = false`
+- [ ] All 5+ new tests pass
+
+---
+
+### T8: Document Signal Sources
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Update AGENTS.md with signal provenance for blast radius and containment.
+
+**Implementation Path**: `src/Policy/__Libraries/StellaOps.Policy.Unknowns/AGENTS.md`
+
+**Documentation to Add**:
+```markdown
+## Signal Sources
+
+### BlastRadius
+- **Source**: Scanner/Signals module call graph analysis
+- **Dependents**: Count of packages in dependency tree
+- **NetFacing**: Reachability from network entrypoints (ASP.NET controllers, gRPC, etc.)
+- **Privilege**: Extracted from container config or runtime probes
+
+### ContainmentSignals
+- **Source**: Runtime probes (eBPF, Seccomp profiles, container inspection)
+- **Seccomp**: Seccomp profile enforcement status
+- **FileSystem**: Mount mode from container spec or /proc/mounts
+- **NetworkPolicy**: Kubernetes NetworkPolicy or firewall rules
+
+### Data Flow
+1. Scanner generates BlastRadius during SBOM analysis
+2. Runtime probes collect ContainmentSignals
+3. Signals stored in `policy.unknowns` table
+4. UnknownRanker reads signals for scoring
+```
+
+**Acceptance Criteria**:
+- [ ] AGENTS.md updated with Signal Sources section
+- [ ] BlastRadius provenance documented
+- [ ] ContainmentSignals provenance documented
+- [ ] Data flow explained
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define BlastRadius model |
+| 2 | T2 | TODO | — | Policy Team | Define ContainmentSignals model |
+| 3 | T3 | TODO | T1, T2 | Policy Team | Extend UnknownRankInput |
+| 4 | T4 | TODO | T3 | Policy Team | Implement ComputeContainmentReduction |
+| 5 | T5 | TODO | T4 | Policy Team | Extend UnknownRankerOptions |
+| 6 | T6 | TODO | T1, T2 | Policy Team | Add DB migration |
+| 7 | T7 | TODO | T4, T5 | Policy Team | Add containment tests |
+| 8 | T8 | TODO | T1, T2 | Policy Team | Document signal sources |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT gap analysis. BlastRadius/ContainmentSignals identified as gap in Triage & Unknowns advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Reduction vs multiplier | Decision | Policy Team | Using reduction (score × (1-reduction)) allows additive containment factors |
+| Maximum cap at 40% | Decision | Policy Team | Prevents well-contained packages from dropping to 0; preserves signal |
+| Nullable signals | Decision | Policy Team | Allow null for unknown containment state; null = no reduction |
+| JSONB vs columns | Decision | Policy Team | Using columns for queryability and indexing |
+
+---
+
+## Success Criteria
+
+- [ ] All 8 tasks marked DONE
+- [ ] 5+ containment-related tests passing
+- [ ] Existing tests still passing (including decay tests from Sprint 1)
+- [ ] Migration applies cleanly
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4000_0002_0001_epss_feed_connector.md b/docs/implplan/SPRINT_4000_0002_0001_epss_feed_connector.md
new file mode 100644
index 000000000..9b262afde
--- /dev/null
+++ b/docs/implplan/SPRINT_4000_0002_0001_epss_feed_connector.md
@@ -0,0 +1,866 @@
+# Sprint 4000.0002.0001 · EPSS Feed Connector
+
+## Topic & Scope
+
+- Create Concelier connector for EPSS (Exploit Prediction Scoring System) feed ingestion
+- Follows three-stage connector pattern: Fetch → Parse → Map
+- Leverages existing `EpssCsvStreamParser` from Scanner module for CSV parsing
+- Integrates with orchestrator for scheduled, rate-limited, airgap-capable ingestion
+
+**Working directory:** `src/Concelier/__Libraries/StellaOps.Concelier.Connector.Epss/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: None (first sprint in batch 0002)
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4000.0001.0001 (Decay), Sprint 4000.0001.0002 (Containment)
+
+## Documentation Prerequisites
+
+- `src/Concelier/__Libraries/StellaOps.Concelier.Core/Orchestration/ConnectorMetadata.cs`
+- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/EpssCsvStreamParser.cs` (reuse pattern)
+- Existing connector examples: `StellaOps.Concelier.Connector.CertFr`, `StellaOps.Concelier.Connector.Osv`
+
+---
+
+## Tasks
+
+### T1: Create Project Structure
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create new connector project following established Concelier patterns.
+
+**Implementation Path**: `src/Concelier/__Libraries/StellaOps.Concelier.Connector.Epss/`
+
+**Project Structure**:
+```
+StellaOps.Concelier.Connector.Epss/
+├── StellaOps.Concelier.Connector.Epss.csproj
+├── EpssConnectorPlugin.cs
+├── EpssDependencyInjectionRoutine.cs
+├── EpssServiceCollectionExtensions.cs
+├── Jobs.cs
+├── Configuration/
+│ └── EpssOptions.cs
+└── Internal/
+ ├── EpssConnector.cs
+ ├── EpssCursor.cs
+ ├── EpssMapper.cs
+ └── EpssDiagnostics.cs
+```
+
+**csproj Definition**:
+```xml
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <AssemblyName>StellaOps.Concelier.Connector.Epss</AssemblyName>
+    <RootNamespace>StellaOps.Concelier.Connector.Epss</RootNamespace>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <LangVersion>preview</LangVersion>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- TODO(review): confirm relative paths against repo layout -->
+    <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
+    <ProjectReference Include="..\..\..\Scanner\__Libraries\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" />
+  </ItemGroup>
+</Project>
+```
+
+**Acceptance Criteria**:
+- [ ] Project created with correct structure
+- [ ] References to Concelier.Core and Scanner.Storage added
+- [ ] Compiles successfully
+- [ ] Follows naming conventions
+
+---
+
+### T2: Implement EpssConnectorPlugin
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement the plugin entry point for connector registration.
+
+**Implementation Path**: `EpssConnectorPlugin.cs`
+
+**Plugin Definition**:
+```csharp
+using Microsoft.Extensions.DependencyInjection;
+using StellaOps.Concelier.Connector.Epss.Internal;
+using StellaOps.Plugin;
+
+namespace StellaOps.Concelier.Connector.Epss;
+
+/// <summary>
+/// Plugin entry point for EPSS feed connector.
+/// Provides EPSS probability scores for CVE exploitation.
+/// </summary>
+public sealed class EpssConnectorPlugin : IConnectorPlugin
+{
+ public const string SourceName = "epss";
+
+ public string Name => SourceName;
+
+ public bool IsAvailable(IServiceProvider services)
+ => services.GetService<EpssConnector>() is not null;
+
+ public IFeedConnector Create(IServiceProvider services)
+ {
+ ArgumentNullException.ThrowIfNull(services);
+ return services.GetRequiredService<EpssConnector>();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Implements `IConnectorPlugin`
+- [ ] Source name is `"epss"`
+- [ ] Factory method resolves connector from DI
+- [ ] Availability check works correctly
+
+---
+
+### T3: Implement EpssOptions
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Create configuration options for EPSS connector.
+
+**Implementation Path**: `Configuration/EpssOptions.cs`
+
+**Options Definition**:
+```csharp
+namespace StellaOps.Concelier.Connector.Epss.Configuration;
+
+/// <summary>
+/// Configuration options for EPSS feed connector.
+/// </summary>
+public sealed class EpssOptions
+{
+ ///
+ /// Configuration section name.
+ ///
+ public const string SectionName = "Concelier:Epss";
+
+ ///
+ /// Base URL for EPSS API/feed.
+ /// Default: https://epss.empiricalsecurity.com/
+ ///
+ public string BaseUrl { get; set; } = "https://epss.empiricalsecurity.com/";
+
+ ///
+ /// Whether to fetch the current day's snapshot or historical.
+ /// Default: true (fetch current).
+ ///
+ public bool FetchCurrent { get; set; } = true;
+
+ ///
+ /// Number of days to look back for initial catch-up.
+ /// Default: 7 days.
+ ///
+ public int CatchUpDays { get; set; } = 7;
+
+ ///
+ /// Request timeout in seconds.
+ /// Default: 120 (2 minutes for large CSV files).
+ ///
+ public int TimeoutSeconds { get; set; } = 120;
+
+ ///
+ /// Maximum retries on transient failure.
+ /// Default: 3.
+ ///
+ public int MaxRetries { get; set; } = 3;
+
+ ///
+ /// Whether to enable offline/airgap mode using bundled data.
+ /// Default: false.
+ ///
+ public bool AirgapMode { get; set; } = false;
+
+ ///
+ /// Path to offline bundle directory (when AirgapMode=true).
+ ///
+ public string? BundlePath { get; set; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] All configuration options documented
+- [ ] Sensible defaults provided
+- [ ] Airgap mode flag present
+- [ ] Timeout and retry settings included
+
+---
+
+### T4: Implement EpssCursor
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Create cursor model for resumable state tracking.
+
+**Implementation Path**: `Internal/EpssCursor.cs`
+
+**Cursor Definition**:
+```csharp
+namespace StellaOps.Concelier.Connector.Epss.Internal;
+
+/// <summary>
+/// Resumable cursor state for EPSS connector.
+/// Tracks model version and last processed date for incremental sync.
+/// </summary>
+public sealed record EpssCursor
+{
+ ///
+ /// EPSS model version tag (e.g., "v2024.12.21").
+ ///
+ public string? ModelVersion { get; init; }
+
+ ///
+ /// Date of the last successfully processed snapshot.
+ ///
+ public DateOnly? LastProcessedDate { get; init; }
+
+ ///
+ /// HTTP ETag of last fetched resource (for conditional requests).
+ ///
+ public string? ETag { get; init; }
+
+ ///
+ /// SHA-256 hash of the last processed CSV content.
+ ///
+ public string? ContentHash { get; init; }
+
+ ///
+ /// Number of CVE scores in the last snapshot.
+ ///
+ public int? LastRowCount { get; init; }
+
+ ///
+ /// Timestamp when cursor was last updated.
+ ///
+ public DateTimeOffset UpdatedAt { get; init; }
+
+ ///
+ /// Creates initial empty cursor.
+ ///
+ public static EpssCursor Empty => new() { UpdatedAt = DateTimeOffset.MinValue };
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Record is immutable
+- [ ] Tracks model version for EPSS updates
+- [ ] Tracks content hash for change detection
+- [ ] Includes ETag for conditional HTTP requests
+- [ ] Has static `Empty` factory
+
+---
+
+### T5: Implement EpssConnector.FetchAsync
+
+**Assignee**: Concelier Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3, T4
+
+**Description**:
+Implement HTTP fetch stage with ETag/gzip support.
+
+**Implementation Path**: `Internal/EpssConnector.cs`
+
+**Fetch Implementation**:
+```csharp
+using System.Net.Http;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+using StellaOps.Concelier.Connector.Epss.Configuration;
+using StellaOps.Concelier.Core.Feeds;
+
+namespace StellaOps.Concelier.Connector.Epss.Internal;
+
+/// <summary>
+/// EPSS feed connector implementing three-stage Fetch/Parse/Map pattern.
+/// </summary>
+public sealed partial class EpssConnector : IFeedConnector
+{
+ private readonly HttpClient _httpClient;
+ private readonly EpssOptions _options;
+ private readonly ILogger<EpssConnector> _logger;
+
+ public EpssConnector(
+ HttpClient httpClient,
+ IOptions<EpssOptions> options,
+ ILogger<EpssConnector> logger)
+ {
+ _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
+ _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ }
+
+ /// <summary>
+ /// Fetches EPSS CSV snapshot from remote or bundle source.
+ /// </summary>
+ public async Task<FetchResult> FetchAsync(
+ EpssCursor cursor,
+ CancellationToken cancellationToken)
+ {
+ var targetDate = DateOnly.FromDateTime(DateTime.UtcNow);
+ var fileName = $"epss_scores-{targetDate:yyyy-MM-dd}.csv.gz";
+
+ if (_options.AirgapMode && !string.IsNullOrEmpty(_options.BundlePath))
+ {
+ return FetchFromBundle(fileName);
+ }
+
+ var uri = new Uri(new Uri(_options.BaseUrl), fileName);
+
+ using var request = new HttpRequestMessage(HttpMethod.Get, uri);
+
+ // Conditional fetch if we have ETag
+ if (!string.IsNullOrEmpty(cursor.ETag))
+ {
+ request.Headers.IfNoneMatch.ParseAdd(cursor.ETag);
+ }
+
+ using var response = await _httpClient.SendAsync(
+ request,
+ HttpCompletionOption.ResponseHeadersRead,
+ cancellationToken).ConfigureAwait(false);
+
+ if (response.StatusCode == System.Net.HttpStatusCode.NotModified)
+ {
+ _logger.LogInformation("EPSS snapshot unchanged (304 Not Modified)");
+ return FetchResult.NotModified(cursor);
+ }
+
+ response.EnsureSuccessStatusCode();
+
+ var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
+ var etag = response.Headers.ETag?.Tag;
+
+ return FetchResult.Success(stream, targetDate, etag);
+ }
+
+ private FetchResult FetchFromBundle(string fileName)
+ {
+ var bundlePath = Path.Combine(_options.BundlePath!, fileName);
+ if (!File.Exists(bundlePath))
+ {
+ _logger.LogWarning("EPSS bundle file not found: {Path}", bundlePath);
+ return FetchResult.NotFound(bundlePath);
+ }
+
+ var stream = File.OpenRead(bundlePath);
+ return FetchResult.Success(stream, DateOnly.FromDateTime(DateTime.UtcNow), etag: null);
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] HTTP GET with gzip streaming
+- [ ] Conditional requests using ETag (If-None-Match)
+- [ ] Handles 304 Not Modified response
+- [ ] Airgap mode falls back to bundle
+- [ ] Proper error handling and logging
+
+---
+
+### T6: Implement EpssConnector.ParseAsync
+
+**Assignee**: Concelier Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T5
+
+**Description**:
+Implement CSV parsing stage reusing Scanner's `EpssCsvStreamParser`.
+
+**Implementation Path**: `Internal/EpssConnector.cs` (continued)
+
+**Parse Implementation**:
+```csharp
+using StellaOps.Scanner.Storage.Epss;
+
+public sealed partial class EpssConnector
+{
+ private readonly EpssCsvStreamParser _parser = new();
+
+ /// <summary>
+ /// Parses gzip CSV stream into EPSS score rows.
+ /// Reuses Scanner's EpssCsvStreamParser for deterministic parsing.
+ /// </summary>
+ public async IAsyncEnumerable<EpssScoreRow> ParseAsync(
+ Stream gzipStream,
+ [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ ArgumentNullException.ThrowIfNull(gzipStream);
+
+ await using var session = _parser.ParseGzip(gzipStream);
+
+ await foreach (var row in session.WithCancellation(cancellationToken))
+ {
+ yield return row;
+ }
+
+ // Log session metadata
+ _logger.LogInformation(
+ "Parsed EPSS snapshot: ModelVersion={ModelVersion}, Date={Date}, Rows={Rows}, Hash={Hash}",
+ session.ModelVersionTag,
+ session.PublishedDate,
+ session.RowCount,
+ session.DecompressedSha256);
+ }
+
+ ///
+ /// Gets parse session metadata after enumeration.
+ ///
+ public EpssCursor CreateCursorFromSession(
+ EpssCsvStreamParser.EpssCsvParseSession session,
+ string? etag)
+ {
+ return new EpssCursor
+ {
+ ModelVersion = session.ModelVersionTag,
+ LastProcessedDate = session.PublishedDate,
+ ETag = etag,
+ ContentHash = session.DecompressedSha256,
+ LastRowCount = session.RowCount,
+ UpdatedAt = DateTimeOffset.UtcNow
+ };
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Reuses `EpssCsvStreamParser` from Scanner module
+- [ ] Async enumerable streaming (no full materialization)
+- [ ] Captures session metadata (model version, date, hash)
+- [ ] Creates cursor from parse session
+- [ ] Proper cancellation support
+
+---
+
+### T7: Implement EpssConnector.MapAsync
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T6
+
+**Description**:
+Map parsed EPSS rows to canonical observation records.
+
+**Implementation Path**: `Internal/EpssMapper.cs`
+
+**Mapper Definition**:
+```csharp
+using StellaOps.Concelier.Core.Observations;
+using StellaOps.Scanner.Storage.Epss;
+
+namespace StellaOps.Concelier.Connector.Epss.Internal;
+
+/// <summary>
+/// Maps EPSS score rows to canonical observation records.
+/// </summary>
+public static class EpssMapper
+{
+ /// <summary>
+ /// Maps a single EPSS score row to an observation.
+ /// </summary>
+ public static EpssObservation ToObservation(
+ EpssScoreRow row,
+ string modelVersion,
+ DateOnly publishedDate)
+ {
+ ArgumentNullException.ThrowIfNull(row);
+
+ return new EpssObservation
+ {
+ CveId = row.CveId,
+ Score = (decimal)row.EpssScore,
+ Percentile = (decimal)row.Percentile,
+ ModelVersion = modelVersion,
+ PublishedDate = publishedDate,
+ Band = DetermineBand((decimal)row.EpssScore)
+ };
+ }
+
+ /// <summary>
+ /// Determines priority band based on EPSS score thresholds.
+ /// Note: thresholds are score values, not percentiles — EPSS scores
+ /// are heavily skewed, so scores >= 0.70 are far rarer than 30% of CVEs.
+ /// </summary>
+ private static EpssBand DetermineBand(decimal score) => score switch
+ {
+ >= 0.70m => EpssBand.Critical, // score 0.70-1.00: Critical priority
+ >= 0.40m => EpssBand.High, // score 0.40-0.70: High priority
+ >= 0.10m => EpssBand.Medium, // score 0.10-0.40: Medium priority
+ _ => EpssBand.Low // score below 0.10: Low priority
+ };
+}
+
+/// <summary>
+/// EPSS observation record.
+/// </summary>
+public sealed record EpssObservation
+{
+ public required string CveId { get; init; }
+ public required decimal Score { get; init; }
+ public required decimal Percentile { get; init; }
+ public required string ModelVersion { get; init; }
+ public required DateOnly PublishedDate { get; init; }
+ public required EpssBand Band { get; init; }
+}
+
+/// <summary>
+/// EPSS priority bands.
+/// </summary>
+public enum EpssBand
+{
+ Low = 0,
+ Medium = 1,
+ High = 2,
+ Critical = 3
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Maps `EpssScoreRow` to `EpssObservation`
+- [ ] Score values converted to `decimal` for consistency
+- [ ] Priority bands assigned based on score thresholds
+- [ ] Model version and date preserved
+- [ ] Immutable record output
+
+---
+
+### T8: Register with WellKnownConnectors
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T2
+
+**Description**:
+Add EPSS to the well-known connectors registry.
+
+**Implementation Path**: `src/Concelier/__Libraries/StellaOps.Concelier.Core/Orchestration/ConnectorRegistrationService.cs`
+
+**Updated WellKnownConnectors**:
+```csharp
+/// <summary>
+/// EPSS (Exploit Prediction Scoring System) connector metadata.
+/// </summary>
+public static ConnectorMetadata Epss => new()
+{
+ ConnectorId = "epss",
+ Source = "epss",
+ DisplayName = "EPSS",
+ Description = "FIRST.org Exploit Prediction Scoring System",
+ Capabilities = ["observations"],
+ ArtifactKinds = ["raw-scores", "normalized"],
+ DefaultCron = "0 10 * * *", // Daily at 10:00 UTC (after EPSS publishes ~08:00 UTC)
+ DefaultRpm = 100, // EPSS publishes no documented rate limit; 100 rpm is a conservative default
+ MaxLagMinutes = 1440, // 24 hours (daily feed)
+ EgressAllowlist = ["epss.empiricalsecurity.com"]
+};
+
+/// <summary>
+/// Gets metadata for all well-known connectors.
+/// </summary>
+public static IReadOnlyList<ConnectorMetadata> All => [Nvd, Ghsa, Osv, Kev, IcsCisa, Epss];
+```
+
+**Acceptance Criteria**:
+- [ ] `Epss` static property added to `WellKnownConnectors`
+- [ ] ConnectorId is `"epss"`
+- [ ] Default cron set to daily 10:00 UTC
+- [ ] Egress allowlist includes `epss.empiricalsecurity.com`
+- [ ] Added to `All` collection
+
+---
+
+### T9: Add Connector Tests
+
+**Assignee**: Concelier Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T5, T6, T7
+
+**Description**:
+Add integration tests with mock HTTP for EPSS connector.
+
+**Implementation Path**: `src/Concelier/__Tests/StellaOps.Concelier.Connector.Epss.Tests/`
+
+**Test Cases**:
+```csharp
+using System.Net;
+using Microsoft.Extensions.Options;
+using StellaOps.Concelier.Connector.Epss.Configuration;
+using StellaOps.Concelier.Connector.Epss.Internal;
+
+namespace StellaOps.Concelier.Connector.Epss.Tests;
+
+public class EpssConnectorTests
+{
+ private static readonly string SampleCsvGz = GetEmbeddedResource("sample_epss.csv.gz");
+
+ [Fact]
+ public async Task FetchAsync_ReturnsStream_OnSuccess()
+ {
+ // Arrange
+ var handler = new MockHttpMessageHandler(SampleCsvGz, HttpStatusCode.OK);
+ var httpClient = new HttpClient(handler);
+ var connector = CreateConnector(httpClient);
+ var cursor = EpssCursor.Empty;
+
+ // Act
+ var result = await connector.FetchAsync(cursor, CancellationToken.None);
+
+ // Assert
+ result.IsSuccess.Should().BeTrue();
+ result.Stream.Should().NotBeNull();
+ }
+
+ [Fact]
+ public async Task FetchAsync_ReturnsNotModified_OnETagMatch()
+ {
+ // Arrange
+ var handler = new MockHttpMessageHandler(status: HttpStatusCode.NotModified);
+ var httpClient = new HttpClient(handler);
+ var connector = CreateConnector(httpClient);
+ var cursor = new EpssCursor { ETag = "\"abc123\"" };
+
+ // Act
+ var result = await connector.FetchAsync(cursor, CancellationToken.None);
+
+ // Assert
+ result.IsNotModified.Should().BeTrue();
+ }
+
+ [Fact]
+ public async Task ParseAsync_YieldsAllRows()
+ {
+ // Arrange
+ await using var stream = GetSampleGzipStream();
+ var connector = CreateConnector();
+
+ // Act
+ var rows = await connector.ParseAsync(stream, CancellationToken.None).ToListAsync();
+
+ // Assert
+ rows.Should().HaveCountGreaterThan(0);
+ rows.Should().AllSatisfy(r =>
+ {
+ r.CveId.Should().StartWith("CVE-");
+ r.EpssScore.Should().BeInRange(0.0, 1.0);
+ r.Percentile.Should().BeInRange(0.0, 1.0);
+ });
+ }
+
+ [Theory]
+ [InlineData(0.75, EpssBand.Critical)]
+ [InlineData(0.50, EpssBand.High)]
+ [InlineData(0.20, EpssBand.Medium)]
+ [InlineData(0.05, EpssBand.Low)]
+ public void ToObservation_AssignsCorrectBand(double score, EpssBand expectedBand)
+ {
+ // Arrange
+ var row = new EpssScoreRow("CVE-2024-12345", score, 0.5);
+
+ // Act
+ var observation = EpssMapper.ToObservation(row, "v2024.12.21", DateOnly.FromDateTime(DateTime.UtcNow));
+
+ // Assert
+ observation.Band.Should().Be(expectedBand);
+ }
+
+ [Fact]
+ public void EpssCursor_Empty_HasMinValue()
+ {
+ // Act
+ var cursor = EpssCursor.Empty;
+
+ // Assert
+ cursor.UpdatedAt.Should().Be(DateTimeOffset.MinValue);
+ cursor.ModelVersion.Should().BeNull();
+ cursor.ContentHash.Should().BeNull();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Test for successful fetch with mock HTTP
+- [ ] Test for 304 Not Modified handling
+- [ ] Test for parse yielding all rows
+- [ ] Test for band assignment logic
+- [ ] Test for cursor creation
+- [ ] All 5+ tests pass
+
+---
+
+### T10: Add Airgap Bundle Support
+
+**Assignee**: Concelier Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T5
+
+**Description**:
+Implement offline bundle fallback for airgap deployments.
+
+**Implementation Path**: `Internal/EpssConnector.cs` (update FetchAsync)
+
+**Bundle Convention**:
+```
+/var/stellaops/bundles/epss/
+├── epss_scores-2024-12-21.csv.gz
+├── epss_scores-2024-12-20.csv.gz
+└── manifest.json
+```
+
+**Manifest Schema**:
+```json
+{
+ "source": "epss",
+ "created": "2024-12-21T10:00:00Z",
+ "files": [
+ {
+ "name": "epss_scores-2024-12-21.csv.gz",
+ "modelVersion": "v2024.12.21",
+ "sha256": "sha256:abc123...",
+ "rowCount": 245000
+ }
+ ]
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Bundle path configurable via `EpssOptions.BundlePath`
+- [ ] Falls back to bundle when `AirgapMode = true`
+- [ ] Reads files from bundle directory
+- [ ] Logs warning if bundle file missing
+- [ ] Manifest.json validation optional but recommended
+
+---
+
+### T11: Update Documentation
+
+**Assignee**: Concelier Team
+**Story Points**: 1
+**Status**: TODO
+**Dependencies**: T8
+
+**Description**:
+Add EPSS connector to documentation and create AGENTS.md.
+
+**Implementation Path**:
+- `src/Concelier/__Libraries/StellaOps.Concelier.Connector.Epss/AGENTS.md` (new)
+- `docs/modules/concelier/connectors.md` (update)
+
+**AGENTS.md Content**:
+````markdown
+# AGENTS.md - EPSS Connector
+
+## Purpose
+Ingests EPSS (Exploit Prediction Scoring System) scores from FIRST.org.
+Provides exploitation probability estimates for CVE prioritization.
+
+## Data Source
+- **URL**: https://epss.empiricalsecurity.com/
+- **Format**: CSV.gz (gzip-compressed CSV)
+- **Update Frequency**: Daily (~08:00 UTC)
+- **Coverage**: All CVEs with exploitation telemetry
+
+## Data Flow
+1. Connector fetches daily snapshot (epss_scores-YYYY-MM-DD.csv.gz)
+2. Parses using EpssCsvStreamParser (reused from Scanner)
+3. Maps to EpssObservation records with band classification
+4. Stores in concelier.epss_observations table
+5. Publishes EpssUpdatedEvent for downstream consumers
+
+## Configuration
+```yaml
+Concelier:
+  Epss:
+    BaseUrl: "https://epss.empiricalsecurity.com/"
+    AirgapMode: false
+    BundlePath: "/var/stellaops/bundles/epss"
+```
+
+## Orchestrator Registration
+- ConnectorId: `epss`
+- Default Schedule: Daily 10:00 UTC
+- Egress Allowlist: `epss.empiricalsecurity.com`
+````
+
+**Acceptance Criteria**:
+- [ ] AGENTS.md created in connector directory
+- [ ] Connector added to docs/modules/concelier/connectors.md
+- [ ] Data flow documented
+- [ ] Configuration examples provided
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Concelier Team | Create project structure |
+| 2 | T2 | TODO | T1 | Concelier Team | Implement EpssConnectorPlugin |
+| 3 | T3 | TODO | T1 | Concelier Team | Implement EpssOptions |
+| 4 | T4 | TODO | T1 | Concelier Team | Implement EpssCursor |
+| 5 | T5 | TODO | T3, T4 | Concelier Team | Implement FetchAsync |
+| 6 | T6 | TODO | T5 | Concelier Team | Implement ParseAsync |
+| 7 | T7 | TODO | T6 | Concelier Team | Implement MapAsync |
+| 8 | T8 | TODO | T2 | Concelier Team | Register with WellKnownConnectors |
+| 9 | T9 | TODO | T5, T6, T7 | Concelier Team | Add connector tests |
+| 10 | T10 | TODO | T5 | Concelier Team | Add airgap bundle support |
+| 11 | T11 | TODO | T8 | Concelier Team | Update documentation |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT gap analysis. EPSS connector identified as gap in orchestrated feed ingestion. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Reuse EpssCsvStreamParser | Decision | Concelier Team | Avoids duplication; Scanner parser already tested and optimized |
+| Separate project vs Scanner extension | Decision | Concelier Team | New Concelier connector aligns with orchestrator pattern |
+| Daily vs hourly schedule | Decision | Concelier Team | EPSS publishes daily; no benefit to more frequent polling |
+| Band thresholds | Decision | Concelier Team | 0.70/0.40/0.10 aligned with EPSS community recommendations |
+
+---
+
+## Success Criteria
+
+- [ ] All 11 tasks marked DONE
+- [ ] 5+ connector tests passing
+- [ ] `dotnet build` succeeds for connector project
+- [ ] Connector registered in WellKnownConnectors
+- [ ] Airgap bundle fallback works
+- [ ] AGENTS.md created
+
diff --git a/docs/implplan/SPRINT_4100_0001_0001_reason_coded_unknowns.md b/docs/implplan/SPRINT_4100_0001_0001_reason_coded_unknowns.md
new file mode 100644
index 000000000..05113a27f
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0001_0001_reason_coded_unknowns.md
@@ -0,0 +1,489 @@
+# Sprint 4100.0001.0001 · Reason-Coded Unknowns
+
+## Topic & Scope
+
+- Define structured reason codes for why a component is marked "unknown"
+- Add remediation hints that map to each reason code
+- Enable actionable triage by categorizing uncertainty sources
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy.Unknowns/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: None (first sprint in batch)
+- **Downstream**: Sprint 4100.0001.0002 (Unknown Budgets), Sprint 4100.0001.0003 (Unknowns in Attestations)
+- **Safe to parallelize with**: Sprint 4100.0002.0001, Sprint 4100.0003.0001, Sprint 4100.0004.0002
+
+## Documentation Prerequisites
+
+- `src/Policy/__Libraries/StellaOps.Policy.Unknowns/AGENTS.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #5.md` (Unknowns as First-Class Risk)
+- `docs/product-advisories/archived/2025-12-21-moat-gap-closure/14-Dec-2025 - Triage and Unknowns Technical Reference.md`
+
+---
+
+## Tasks
+
+### T1: Define UnknownReasonCode Enum
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create an enumeration defining the canonical reason codes for unknowns.
+
+**Implementation Path**: `Models/UnknownReasonCode.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Models;
+
+/// <summary>
+/// Canonical reason codes explaining why a component is marked as "unknown".
+/// Each code maps to a specific remediation action.
+/// </summary>
+public enum UnknownReasonCode
+{
+    /// <summary>
+    /// U-RCH: Call path analysis is indeterminate.
+    /// The reachability analyzer cannot confirm or deny exploitability.
+    /// </summary>
+    Reachability,
+
+    /// <summary>
+    /// U-ID: Ambiguous package identity or missing digest.
+    /// Cannot uniquely identify the component (e.g., missing PURL, no checksum).
+    /// </summary>
+    Identity,
+
+    /// <summary>
+    /// U-PROV: Cannot map binary artifact to source repository.
+    /// Provenance chain is broken or unavailable.
+    /// </summary>
+    Provenance,
+
+    /// <summary>
+    /// U-VEX: VEX statements conflict or missing applicability data.
+    /// Multiple VEX sources disagree or no VEX coverage exists.
+    /// </summary>
+    VexConflict,
+
+    /// <summary>
+    /// U-FEED: Required knowledge source is missing or stale.
+    /// Advisory feed gap (e.g., no NVD/OSV data for this package).
+    /// </summary>
+    FeedGap,
+
+    /// <summary>
+    /// U-CONFIG: Feature flag or configuration not observable.
+    /// Cannot determine if vulnerable code path is enabled at runtime.
+    /// </summary>
+    ConfigUnknown,
+
+    /// <summary>
+    /// U-ANALYZER: Language or framework not supported by analyzer.
+    /// Static analysis tools do not cover this ecosystem.
+    /// </summary>
+    AnalyzerLimit
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownReasonCode.cs` file created in `Models/` directory
+- [ ] 7 reason codes defined with XML documentation
+- [ ] Each code has a short prefix (U-RCH, U-ID, etc.) documented
+- [ ] Namespace is `StellaOps.Policy.Unknowns.Models`
+
+---
+
+### T2: Extend Unknown Model
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Add reason code, remediation hint, evidence references, and assumptions to the Unknown model.
+
+**Implementation Path**: `Models/Unknown.cs`
+
+**Updated Model**:
+```csharp
+public sealed record Unknown
+{
+ // Existing fields
+ public Guid Id { get; init; }
+ public string PackageUrl { get; init; }
+ public string? CveId { get; init; }
+ public decimal Score { get; init; }
+ public UnknownBand Band { get; init; }
+
+ // NEW: Reason code explaining why this is unknown
+ public UnknownReasonCode ReasonCode { get; init; }
+
+ // NEW: Human-readable remediation guidance
+ public string? RemediationHint { get; init; }
+
+ // NEW: References to evidence that led to unknown classification
+    public IReadOnlyList<EvidenceRef> EvidenceRefs { get; init; } = [];
+
+ // NEW: Assumptions made during analysis (for audit trail)
+    public IReadOnlyList<string> Assumptions { get; init; } = [];
+}
+
+/// <summary>
+/// Reference to evidence supporting unknown classification.
+/// </summary>
+public sealed record EvidenceRef(
+ string Type, // "reachability", "vex", "sbom", "feed"
+ string Uri, // Location of evidence
+ string? Digest); // Content hash if applicable
+```
+
+**Acceptance Criteria**:
+- [ ] `ReasonCode` field added to `Unknown` record
+- [ ] `RemediationHint` nullable string field added
+- [ ] `EvidenceRefs` collection added with `EvidenceRef` record
+- [ ] `Assumptions` string collection added
+- [ ] All new fields have XML documentation
+- [ ] Existing tests still pass with default values
+
+---
+
+### T3: Create RemediationHintsRegistry
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Create a registry that maps reason codes to actionable remediation hints.
+
+**Implementation Path**: `Services/RemediationHintsRegistry.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Services;
+
+/// <summary>
+/// Registry of remediation hints for each unknown reason code.
+/// Provides actionable guidance for resolving unknowns.
+/// </summary>
+public sealed class RemediationHintsRegistry : IRemediationHintsRegistry
+{
+    private static readonly IReadOnlyDictionary<UnknownReasonCode, RemediationHint> _hints =
+        new Dictionary<UnknownReasonCode, RemediationHint>
+ {
+ [UnknownReasonCode.Reachability] = new(
+ ShortHint: "Run reachability analysis",
+ DetailedHint: "Execute call-graph analysis to determine if vulnerable code paths are reachable from application entrypoints.",
+ AutomationRef: "stella analyze --reachability"),
+
+ [UnknownReasonCode.Identity] = new(
+ ShortHint: "Add package digest",
+ DetailedHint: "Ensure SBOM includes package checksums (SHA-256) and valid PURL coordinates.",
+ AutomationRef: "stella sbom --include-digests"),
+
+ [UnknownReasonCode.Provenance] = new(
+ ShortHint: "Add provenance attestation",
+ DetailedHint: "Generate SLSA provenance linking binary artifact to source repository and build.",
+ AutomationRef: "stella attest --provenance"),
+
+ [UnknownReasonCode.VexConflict] = new(
+ ShortHint: "Publish authoritative VEX",
+ DetailedHint: "Create or update VEX document with applicability assessment for your deployment context.",
+ AutomationRef: "stella vex create"),
+
+ [UnknownReasonCode.FeedGap] = new(
+ ShortHint: "Add advisory source",
+ DetailedHint: "Configure additional advisory feeds (OSV, vendor-specific) or request coverage from upstream.",
+ AutomationRef: "stella feed add"),
+
+ [UnknownReasonCode.ConfigUnknown] = new(
+ ShortHint: "Document feature flags",
+ DetailedHint: "Export runtime configuration showing which features are enabled/disabled in this deployment.",
+ AutomationRef: "stella config export"),
+
+ [UnknownReasonCode.AnalyzerLimit] = new(
+ ShortHint: "Request analyzer support",
+ DetailedHint: "This language/framework is not yet supported. File an issue or use manual assessment.",
+ AutomationRef: null)
+ };
+
+ public RemediationHint GetHint(UnknownReasonCode code) =>
+ _hints.TryGetValue(code, out var hint) ? hint : RemediationHint.Empty;
+
+ public IEnumerable<(UnknownReasonCode Code, RemediationHint Hint)> GetAllHints() =>
+ _hints.Select(kv => (kv.Key, kv.Value));
+}
+
+public sealed record RemediationHint(
+ string ShortHint,
+ string DetailedHint,
+ string? AutomationRef)
+{
+ public static RemediationHint Empty { get; } = new("No remediation available", "", null);
+}
+
+public interface IRemediationHintsRegistry
+{
+ RemediationHint GetHint(UnknownReasonCode code);
+ IEnumerable<(UnknownReasonCode Code, RemediationHint Hint)> GetAllHints();
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RemediationHintsRegistry.cs` created in `Services/`
+- [ ] All 7 reason codes have mapped hints
+- [ ] Each hint includes short hint, detailed hint, and optional automation reference
+- [ ] Interface `IRemediationHintsRegistry` defined for DI
+- [ ] Registry is thread-safe (immutable dictionary)
+
+---
+
+### T4: Update UnknownRanker
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Update the UnknownRanker to emit reason codes and remediation hints on ranking.
+
+**Implementation Path**: `Services/UnknownRanker.cs`
+
+**Updated Input**:
+```csharp
+public sealed record UnknownRankInput(
+ // Existing fields
+ bool HasVexStatement,
+ bool HasReachabilityData,
+ bool HasConflictingSources,
+ bool IsStaleAdvisory,
+ bool IsInKev,
+ decimal EpssScore,
+ decimal CvssScore,
+ DateTimeOffset? FirstSeenAt,
+ DateTimeOffset? LastEvaluatedAt,
+ DateTimeOffset AsOfDateTime,
+ BlastRadius? BlastRadius,
+ ContainmentSignals? Containment,
+ // NEW: Reason classification inputs
+ bool HasPackageDigest,
+ bool HasProvenanceAttestation,
+ bool HasVexConflicts,
+ bool HasFeedCoverage,
+ bool HasConfigVisibility,
+ bool IsAnalyzerSupported);
+```
+
+**Reason Code Assignment Logic**:
+```csharp
+/// <summary>
+/// Determines the primary reason code for unknown classification.
+/// Returns the most actionable/resolvable reason.
+/// </summary>
+private UnknownReasonCode DetermineReasonCode(UnknownRankInput input)
+{
+ // Priority order: most actionable first
+ if (!input.IsAnalyzerSupported)
+ return UnknownReasonCode.AnalyzerLimit;
+
+ if (!input.HasReachabilityData)
+ return UnknownReasonCode.Reachability;
+
+ if (!input.HasPackageDigest)
+ return UnknownReasonCode.Identity;
+
+ if (!input.HasProvenanceAttestation)
+ return UnknownReasonCode.Provenance;
+
+ if (input.HasVexConflicts || !input.HasVexStatement)
+ return UnknownReasonCode.VexConflict;
+
+ if (!input.HasFeedCoverage)
+ return UnknownReasonCode.FeedGap;
+
+ if (!input.HasConfigVisibility)
+ return UnknownReasonCode.ConfigUnknown;
+
+ // Default to reachability if no specific reason
+ return UnknownReasonCode.Reachability;
+}
+```
+
+**Updated Result**:
+```csharp
+public sealed record UnknownRankResult(
+ decimal Score,
+ decimal UncertaintyFactor,
+ decimal ExploitPressure,
+ UnknownBand Band,
+ decimal DecayFactor = 1.0m,
+ decimal ContainmentReduction = 0m,
+ // NEW: Reason code and hint
+ UnknownReasonCode ReasonCode = UnknownReasonCode.Reachability,
+ string? RemediationHint = null);
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownRankInput` extended with reason classification inputs
+- [ ] `DetermineReasonCode` method implemented with priority logic
+- [ ] `UnknownRankResult` extended with `ReasonCode` and `RemediationHint`
+- [ ] Ranker uses `IRemediationHintsRegistry` to populate hints
+- [ ] Existing tests updated for new input/output fields
+
+---
+
+### T5: Add DB Migration
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Add columns to `policy.unknowns` table for reason code and remediation hint.
+
+**Implementation Path**: `src/Policy/__Libraries/StellaOps.Policy.Storage.Postgres/migrations/`
+
+**Migration SQL**:
+```sql
+-- Migration: Add reason code and remediation columns to policy.unknowns
+
+ALTER TABLE policy.unknowns
+ADD COLUMN IF NOT EXISTS reason_code TEXT,
+ADD COLUMN IF NOT EXISTS remediation_hint TEXT,
+ADD COLUMN IF NOT EXISTS evidence_refs JSONB DEFAULT '[]',
+ADD COLUMN IF NOT EXISTS assumptions JSONB DEFAULT '[]';
+
+-- Create index for querying by reason code
+CREATE INDEX IF NOT EXISTS idx_unknowns_reason_code
+ON policy.unknowns(reason_code)
+WHERE reason_code IS NOT NULL;
+
+COMMENT ON COLUMN policy.unknowns.reason_code IS 'Canonical reason code: Reachability, Identity, Provenance, VexConflict, FeedGap, ConfigUnknown, AnalyzerLimit';
+COMMENT ON COLUMN policy.unknowns.remediation_hint IS 'Actionable guidance for resolving this unknown';
+COMMENT ON COLUMN policy.unknowns.evidence_refs IS 'JSON array of evidence references supporting classification';
+COMMENT ON COLUMN policy.unknowns.assumptions IS 'JSON array of assumptions made during analysis';
+```
+
+**Acceptance Criteria**:
+- [ ] Migration file created with sequential number
+- [ ] `reason_code` TEXT column added
+- [ ] `remediation_hint` TEXT column added
+- [ ] `evidence_refs` JSONB column added with default
+- [ ] `assumptions` JSONB column added with default
+- [ ] Index created for reason_code queries
+- [ ] Column comments added for documentation
+- [ ] Migration is idempotent (IF NOT EXISTS)
+- [ ] RLS policies still apply
+
+---
+
+### T6: Update API DTOs
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T4
+
+**Description**:
+Include reason codes and remediation hints in API response DTOs.
+
+**Implementation Path**: `src/Policy/StellaOps.Policy.WebService/Controllers/UnknownsController.cs`
+
+**Updated DTO**:
+```csharp
+public sealed record UnknownDto
+{
+ public Guid Id { get; init; }
+ public string PackageUrl { get; init; }
+ public string? CveId { get; init; }
+ public decimal Score { get; init; }
+ public string Band { get; init; }
+ // NEW fields
+ public string ReasonCode { get; init; }
+ public string ReasonCodeShort { get; init; } // e.g., "U-RCH"
+ public string? RemediationHint { get; init; }
+ public string? DetailedHint { get; init; }
+ public string? AutomationCommand { get; init; }
+    public IReadOnlyList<EvidenceRefDto> EvidenceRefs { get; init; }
+}
+
+public sealed record EvidenceRefDto(
+ string Type,
+ string Uri,
+ string? Digest);
+```
+
+**Short Code Mapping**:
+```csharp
+private static readonly IReadOnlyDictionary<UnknownReasonCode, string> ShortCodes = new Dictionary<UnknownReasonCode, string>
+{
+ [UnknownReasonCode.Reachability] = "U-RCH",
+ [UnknownReasonCode.Identity] = "U-ID",
+ [UnknownReasonCode.Provenance] = "U-PROV",
+ [UnknownReasonCode.VexConflict] = "U-VEX",
+ [UnknownReasonCode.FeedGap] = "U-FEED",
+ [UnknownReasonCode.ConfigUnknown] = "U-CONFIG",
+ [UnknownReasonCode.AnalyzerLimit] = "U-ANALYZER"
+};
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownDto` extended with reason code fields
+- [ ] Short code (U-RCH, U-ID, etc.) included in response
+- [ ] Remediation hint fields included
+- [ ] Evidence references included as array
+- [ ] OpenAPI spec updated
+- [ ] Response schema validated
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define UnknownReasonCode enum |
+| 2 | T2 | TODO | T1 | Policy Team | Extend Unknown model |
+| 3 | T3 | TODO | T1 | Policy Team | Create RemediationHintsRegistry |
+| 4 | T4 | TODO | T2, T3 | Policy Team | Update UnknownRanker |
+| 5 | T5 | TODO | T1, T2 | Policy Team | Add DB migration |
+| 6 | T6 | TODO | T4 | Policy Team | Update API DTOs |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Reason-coded unknowns identified as requirement from Moat #5 advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| 7 reason codes | Decision | Policy Team | Covers all identified uncertainty sources; extensible if needed |
+| Priority ordering | Decision | Policy Team | Most actionable/resolvable reasons assigned first |
+| Short codes (U-*) | Decision | Policy Team | Human-readable prefixes for triage dashboards |
+| JSONB for arrays | Decision | Policy Team | Flexible schema for evidence refs and assumptions |
+
+---
+
+## Success Criteria
+
+- [ ] All 6 tasks marked DONE
+- [ ] 7 reason codes defined and documented
+- [ ] Remediation hints mapped for all codes
+- [ ] API returns reason codes in responses
+- [ ] Migration applies cleanly
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds for `StellaOps.Policy.Unknowns.Tests`
diff --git a/docs/implplan/SPRINT_4100_0001_0002_unknown_budgets.md b/docs/implplan/SPRINT_4100_0001_0002_unknown_budgets.md
new file mode 100644
index 000000000..52b900ddd
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0001_0002_unknown_budgets.md
@@ -0,0 +1,659 @@
+# Sprint 4100.0001.0002 · Unknown Budgets & Environment Thresholds
+
+## Topic & Scope
+
+- Define environment-aware unknown budgets (prod: strict, stage: moderate, dev: permissive)
+- Implement budget enforcement with block/warn actions
+- Enable policy-driven control over acceptable unknown counts
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy.Unknowns/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0001.0001 (Reason-Coded Unknowns) — MUST BE DONE
+- **Downstream**: Sprint 4100.0001.0003 (Unknowns in Attestations)
+- **Safe to parallelize with**: Sprint 4100.0002.0002, Sprint 4100.0003.0002
+
+## Documentation Prerequisites
+
+- Sprint 4100.0001.0001 completion
+- `src/Policy/__Libraries/StellaOps.Policy.Unknowns/AGENTS.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #5.md` (Unknowns as First-Class Risk)
+
+---
+
+## Tasks
+
+### T1: Define UnknownBudget Model
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create a model representing unknown budgets with environment-specific thresholds.
+
+**Implementation Path**: `Models/UnknownBudget.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Models;
+
+/// <summary>
+/// Represents an unknown budget for a specific environment.
+/// Budgets define maximum acceptable unknown counts by reason code.
+/// </summary>
+public sealed record UnknownBudget
+{
+    /// <summary>
+    /// Environment name: "prod", "stage", "dev", or custom.
+    /// </summary>
+    public required string Environment { get; init; }
+
+    /// <summary>
+    /// Maximum total unknowns allowed across all reason codes.
+    /// </summary>
+    public int? TotalLimit { get; init; }
+
+    /// <summary>
+    /// Per-reason-code limits. Missing codes inherit from TotalLimit.
+    /// </summary>
+    public IReadOnlyDictionary<UnknownReasonCode, int> ReasonLimits { get; init; }
+        = new Dictionary<UnknownReasonCode, int>();
+
+    /// <summary>
+    /// Action when budget is exceeded.
+    /// </summary>
+    public BudgetAction Action { get; init; } = BudgetAction.Warn;
+
+    /// <summary>
+    /// Custom message to display when budget is exceeded.
+    /// </summary>
+    public string? ExceededMessage { get; init; }
+}
+
+/// <summary>
+/// Action to take when unknown budget is exceeded.
+/// </summary>
+public enum BudgetAction
+{
+    /// <summary>
+    /// Log warning only, do not block.
+    /// </summary>
+    Warn,
+
+    /// <summary>
+    /// Block the operation (fail policy evaluation).
+    /// </summary>
+    Block,
+
+    /// <summary>
+    /// Warn but allow if exception is applied.
+    /// </summary>
+    WarnUnlessException
+}
+
+/// <summary>
+/// Result of checking unknowns against a budget.
+/// </summary>
+public sealed record BudgetCheckResult
+{
+ public required bool IsWithinBudget { get; init; }
+ public required BudgetAction RecommendedAction { get; init; }
+ public required int TotalUnknowns { get; init; }
+ public int? TotalLimit { get; init; }
+    public IReadOnlyDictionary<UnknownReasonCode, BudgetViolation> Violations { get; init; }
+        = new Dictionary<UnknownReasonCode, BudgetViolation>();
+ public string? Message { get; init; }
+}
+
+/// <summary>
+/// Details of a specific budget violation.
+/// </summary>
+public sealed record BudgetViolation(
+ UnknownReasonCode ReasonCode,
+ int Count,
+ int Limit);
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownBudget.cs` file created in `Models/` directory
+- [ ] Budget supports total and per-reason limits
+- [ ] `BudgetAction` enum with Warn, Block, WarnUnlessException
+- [ ] `BudgetCheckResult` captures violation details
+- [ ] XML documentation on all types
+
+---
+
+### T2: Create UnknownBudgetService
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement service for retrieving budgets and checking compliance.
+
+**Implementation Path**: `Services/UnknownBudgetService.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Services;
+
+/// <summary>
+/// Service for managing and checking unknown budgets.
+/// </summary>
+public sealed class UnknownBudgetService : IUnknownBudgetService
+{
+    private readonly IOptionsMonitor<UnknownBudgetOptions> _options;
+    private readonly ILogger<UnknownBudgetService> _logger;
+
+    public UnknownBudgetService(
+        IOptionsMonitor<UnknownBudgetOptions> options,
+        ILogger<UnknownBudgetService> logger)
+    {
+        _options = options;
+        _logger = logger;
+    }
+
+    /// <summary>
+    /// Gets the budget configuration for a specific environment.
+    /// Falls back to default if environment not found.
+    /// </summary>
+ public UnknownBudget GetBudgetForEnvironment(string environment)
+ {
+ var budgets = _options.CurrentValue.Budgets;
+
+ if (budgets.TryGetValue(environment, out var budget))
+ return budget;
+
+ if (budgets.TryGetValue("default", out var defaultBudget))
+ return defaultBudget with { Environment = environment };
+
+ // Permissive fallback if no configuration
+ return new UnknownBudget
+ {
+ Environment = environment,
+ TotalLimit = null,
+ Action = BudgetAction.Warn
+ };
+ }
+
+    /// <summary>
+    /// Checks a collection of unknowns against the budget for an environment.
+    /// </summary>
+    public BudgetCheckResult CheckBudget(
+        string environment,
+        IReadOnlyList<Unknown> unknowns)
+ {
+ var budget = GetBudgetForEnvironment(environment);
+        var violations = new Dictionary<UnknownReasonCode, BudgetViolation>();
+ var total = unknowns.Count;
+
+ // Check per-reason-code limits
+ var byReason = unknowns
+ .GroupBy(u => u.ReasonCode)
+ .ToDictionary(g => g.Key, g => g.Count());
+
+ foreach (var (code, limit) in budget.ReasonLimits)
+ {
+ if (byReason.TryGetValue(code, out var count) && count > limit)
+ {
+ violations[code] = new BudgetViolation(code, count, limit);
+ }
+ }
+
+ // Check total limit
+ var isWithinBudget = violations.Count == 0 &&
+ (!budget.TotalLimit.HasValue || total <= budget.TotalLimit.Value);
+
+ var message = isWithinBudget
+ ? null
+ : budget.ExceededMessage ?? $"Unknown budget exceeded: {total} unknowns in {environment}";
+
+ return new BudgetCheckResult
+ {
+ IsWithinBudget = isWithinBudget,
+ RecommendedAction = isWithinBudget ? BudgetAction.Warn : budget.Action,
+ TotalUnknowns = total,
+ TotalLimit = budget.TotalLimit,
+ Violations = violations,
+ Message = message
+ };
+ }
+
+    /// <summary>
+    /// Checks if an operation should be blocked based on budget result.
+    /// </summary>
+ public bool ShouldBlock(BudgetCheckResult result) =>
+ !result.IsWithinBudget && result.RecommendedAction == BudgetAction.Block;
+}
+
+public interface IUnknownBudgetService
+{
+ UnknownBudget GetBudgetForEnvironment(string environment);
+    BudgetCheckResult CheckBudget(string environment, IReadOnlyList<Unknown> unknowns);
+ bool ShouldBlock(BudgetCheckResult result);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownBudgetService.cs` created in `Services/`
+- [ ] `GetBudgetForEnvironment` with fallback logic
+- [ ] `CheckBudget` aggregates violations by reason code
+- [ ] `ShouldBlock` helper method
+- [ ] Interface defined for DI
+
+---
+
+### T3: Implement Budget Checking Logic
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T2
+
+**Description**:
+Implement the detailed budget checking with block/warn decision logic.
+
+**Implementation Path**: `Services/UnknownBudgetService.cs`
+
+**Extended Logic**:
+```csharp
+/// <summary>
+/// Performs comprehensive budget check with environment escalation.
+/// </summary>
+public BudgetCheckResult CheckBudgetWithEscalation(
+ string environment,
+    IReadOnlyList<Unknown> unknowns,
+ IReadOnlyList? exceptions = null)
+{
+ var baseResult = CheckBudget(environment, unknowns);
+
+ if (baseResult.IsWithinBudget)
+ return baseResult;
+
+ // Check if exceptions cover the violations
+ if (exceptions?.Count > 0)
+ {
+ var coveredReasons = exceptions
+ .Where(e => e.Status == ExceptionStatus.Approved)
+ .SelectMany(e => e.CoveredReasonCodes)
+ .ToHashSet();
+
+ var uncoveredViolations = baseResult.Violations
+ .Where(v => !coveredReasons.Contains(v.Key))
+ .ToDictionary(v => v.Key, v => v.Value);
+
+ if (uncoveredViolations.Count == 0)
+ {
+ return baseResult with
+ {
+ IsWithinBudget = true,
+ RecommendedAction = BudgetAction.Warn,
+ Message = "Budget exceeded but covered by approved exceptions"
+ };
+ }
+ }
+
+ // Log the violation for observability
+ _logger.LogWarning(
+ "Unknown budget exceeded for environment {Environment}: {Total}/{Limit}",
+ environment, baseResult.TotalUnknowns, baseResult.TotalLimit);
+
+ return baseResult;
+}
+
+/// <summary>
+/// Gets a summary of budget status for reporting.
+/// </summary>
+public BudgetStatusSummary GetBudgetStatus(
+ string environment,
+    IReadOnlyList<Unknown> unknowns)
+{
+ var budget = GetBudgetForEnvironment(environment);
+ var result = CheckBudget(environment, unknowns);
+
+ return new BudgetStatusSummary
+ {
+ Environment = environment,
+ TotalUnknowns = unknowns.Count,
+ TotalLimit = budget.TotalLimit,
+ PercentageUsed = budget.TotalLimit.HasValue
+ ? (decimal)unknowns.Count / budget.TotalLimit.Value * 100
+ : 0m,
+ IsExceeded = !result.IsWithinBudget,
+ ViolationCount = result.Violations.Count,
+ ByReasonCode = unknowns
+ .GroupBy(u => u.ReasonCode)
+ .ToDictionary(g => g.Key, g => g.Count())
+ };
+}
+
+public sealed record BudgetStatusSummary
+{
+ public required string Environment { get; init; }
+ public required int TotalUnknowns { get; init; }
+ public int? TotalLimit { get; init; }
+ public decimal PercentageUsed { get; init; }
+ public bool IsExceeded { get; init; }
+ public int ViolationCount { get; init; }
+ public IReadOnlyDictionary<UnknownReasonCode, int> ByReasonCode { get; init; }
+ = new Dictionary<UnknownReasonCode, int>();
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `CheckBudgetWithEscalation` supports exception coverage
+- [ ] Approved exceptions can cover specific reason codes
+- [ ] Violations logged for observability
+- [ ] `GetBudgetStatus` returns summary for dashboards
+- [ ] Percentage calculation for budget utilization
+
+---
+
+### T4: Add Policy Configuration
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Define YAML configuration schema for unknown budgets.
+
+**Implementation Path**: `Configuration/UnknownBudgetOptions.cs` (new file)
+
+**Options Class**:
+```csharp
+namespace StellaOps.Policy.Unknowns.Configuration;
+
+/// <summary>
+/// Configuration options for unknown budgets.
+/// </summary>
+public sealed class UnknownBudgetOptions
+{
+ public const string SectionName = "UnknownBudgets";
+
+ /// <summary>
+ /// Budget configurations keyed by environment name.
+ /// </summary>
+ public Dictionary<string, UnknownBudget> Budgets { get; set; } = new();
+
+ /// <summary>
+ /// Whether to enforce budgets (false = warn only).
+ /// </summary>
+ public bool EnforceBudgets { get; set; } = true;
+}
+```
+
+**Sample YAML Configuration**:
+```yaml
+# etc/policy.unknowns.yaml
+unknownBudgets:
+ enforceBudgets: true
+ budgets:
+ prod:
+ environment: prod
+ totalLimit: 3
+ reasonLimits:
+ Reachability: 0
+ Provenance: 0
+ VexConflict: 1
+ action: Block
+ exceededMessage: "Production requires zero reachability unknowns"
+
+ stage:
+ environment: stage
+ totalLimit: 10
+ reasonLimits:
+ Reachability: 1
+ action: WarnUnlessException
+
+ dev:
+ environment: dev
+ totalLimit: null # No limit
+ action: Warn
+
+ default:
+ environment: default
+ totalLimit: 5
+ action: Warn
+```
+
+**DI Registration**:
+```csharp
+// In startup/DI configuration
+services.Configure<UnknownBudgetOptions>(
+ configuration.GetSection(UnknownBudgetOptions.SectionName));
+services.AddSingleton<IUnknownBudgetService, UnknownBudgetService>();
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownBudgetOptions.cs` created in `Configuration/`
+- [ ] Options bind from YAML configuration
+- [ ] Sample configuration documented
+- [ ] `EnforceBudgets` toggle for global enable/disable
+- [ ] Default budget fallback defined
+
+---
+
+### T5: Integrate with PolicyEvaluator
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Integrate unknown budget checking into the policy evaluation pipeline.
+
+**Implementation Path**: `src/Policy/StellaOps.Policy.Engine/Services/PolicyEvaluator.cs`
+
+**Integration Points**:
+```csharp
+public sealed class PolicyEvaluator
+{
+ private readonly IUnknownBudgetService _budgetService;
+
+ public async Task<PolicyEvaluationResult> EvaluateAsync(
+ PolicyEvaluationRequest request,
+ CancellationToken ct = default)
+ {
+ // ... existing evaluation logic ...
+
+ // Check unknown budgets
+ var budgetResult = _budgetService.CheckBudgetWithEscalation(
+ request.Environment,
+ unknowns,
+ request.AppliedExceptions);
+
+ if (_budgetService.ShouldBlock(budgetResult))
+ {
+ return PolicyEvaluationResult.Fail(
+ PolicyFailureReason.UnknownBudgetExceeded,
+ budgetResult.Message,
+ new UnknownBudgetViolation(budgetResult));
+ }
+
+ // Include budget status in result
+ return result with
+ {
+ UnknownBudgetStatus = new BudgetStatusSummary
+ {
+ IsExceeded = !budgetResult.IsWithinBudget,
+ TotalUnknowns = budgetResult.TotalUnknowns,
+ TotalLimit = budgetResult.TotalLimit,
+ Violations = budgetResult.Violations
+ }
+ };
+ }
+}
+
+///
+/// Failure reason for policy evaluation.
+///
+public enum PolicyFailureReason
+{
+ // Existing reasons...
+ CveExceedsThreshold,
+ LicenseViolation,
+ // NEW
+ UnknownBudgetExceeded
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `PolicyEvaluator` checks unknown budgets
+- [ ] Blocking configured budgets fail evaluation
+- [ ] `UnknownBudgetExceeded` failure reason added
+- [ ] Budget status included in evaluation result
+- [ ] Exception coverage respected
+
+---
+
+### T6: Add Tests
+
+**Assignee**: Policy Team
+**Story Points**: 1
+**Status**: TODO
+**Dependencies**: T5
+
+**Description**:
+Add comprehensive tests for budget enforcement.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Unknowns.Tests/Services/UnknownBudgetServiceTests.cs`
+
+**Test Cases**:
+```csharp
+public class UnknownBudgetServiceTests
+{
+ [Fact]
+ public void GetBudgetForEnvironment_KnownEnv_ReturnsBudget()
+ {
+ // Arrange
+ var options = CreateOptions(prod: new UnknownBudget
+ {
+ Environment = "prod",
+ TotalLimit = 3
+ });
+ var service = new UnknownBudgetService(options, NullLogger<UnknownBudgetService>.Instance);
+
+ // Act
+ var budget = service.GetBudgetForEnvironment("prod");
+
+ // Assert
+ budget.TotalLimit.Should().Be(3);
+ }
+
+ [Fact]
+ public void CheckBudget_WithinLimit_ReturnsSuccess()
+ {
+ var unknowns = CreateUnknowns(count: 2);
+ var result = _service.CheckBudget("prod", unknowns);
+
+ result.IsWithinBudget.Should().BeTrue();
+ }
+
+ [Fact]
+ public void CheckBudget_ExceedsTotal_ReturnsViolation()
+ {
+ var unknowns = CreateUnknowns(count: 5); // limit is 3
+ var result = _service.CheckBudget("prod", unknowns);
+
+ result.IsWithinBudget.Should().BeFalse();
+ result.RecommendedAction.Should().Be(BudgetAction.Block);
+ }
+
+ [Fact]
+ public void CheckBudget_ExceedsReasonLimit_ReturnsSpecificViolation()
+ {
+ var unknowns = CreateUnknowns(
+ reachability: 2, // limit is 0
+ identity: 1);
+ var result = _service.CheckBudget("prod", unknowns);
+
+ result.Violations.Should().ContainKey(UnknownReasonCode.Reachability);
+ result.Violations[UnknownReasonCode.Reachability].Count.Should().Be(2);
+ }
+
+ [Fact]
+ public void CheckBudgetWithEscalation_ExceptionCovers_AllowsOperation()
+ {
+ var unknowns = CreateUnknowns(reachability: 1);
+ var exceptions = new[] { CreateException(UnknownReasonCode.Reachability) };
+
+ var result = _service.CheckBudgetWithEscalation("prod", unknowns, exceptions);
+
+ result.IsWithinBudget.Should().BeTrue();
+ result.Message.Should().Contain("covered by approved exceptions");
+ }
+
+ [Fact]
+ public void ShouldBlock_BlockAction_ReturnsTrue()
+ {
+ var result = new BudgetCheckResult
+ {
+ IsWithinBudget = false,
+ RecommendedAction = BudgetAction.Block
+ };
+
+ _service.ShouldBlock(result).Should().BeTrue();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Test for budget retrieval with fallback
+- [ ] Test for within-budget success
+- [ ] Test for total limit violation
+- [ ] Test for per-reason limit violation
+- [ ] Test for exception coverage
+- [ ] Test for block action decision
+- [ ] All tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define UnknownBudget model |
+| 2 | T2 | TODO | T1 | Policy Team | Create UnknownBudgetService |
+| 3 | T3 | TODO | T2 | Policy Team | Implement budget checking logic |
+| 4 | T4 | TODO | T1 | Policy Team | Add policy configuration |
+| 5 | T5 | TODO | T2, T3 | Policy Team | Integrate with PolicyEvaluator |
+| 6 | T6 | TODO | T5 | Policy Team | Add tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Unknown budgets identified as requirement from Moat #5 advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Environment-keyed budgets | Decision | Policy Team | Allows prod/stage/dev differentiation |
+| BudgetAction enum | Decision | Policy Team | Block, Warn, WarnUnlessException provides flexibility |
+| Exception coverage | Decision | Policy Team | Approved exceptions can override budget violations |
+| Null totalLimit | Decision | Policy Team | Null means unlimited (no budget enforcement) |
+
+---
+
+## Success Criteria
+
+- [ ] All 6 tasks marked DONE
+- [ ] Budget configuration loads from YAML
+- [ ] Policy evaluator respects budget limits
+- [ ] Exceptions can cover violations
+- [ ] 6+ budget-related tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0001_0003_unknowns_attestations.md b/docs/implplan/SPRINT_4100_0001_0003_unknowns_attestations.md
new file mode 100644
index 000000000..c1bcaae6e
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0001_0003_unknowns_attestations.md
@@ -0,0 +1,675 @@
+# Sprint 4100.0001.0003 · Unknowns in Attestations
+
+## Topic & Scope
+
+- Include unknown summaries in signed attestations
+- Aggregate unknowns by reason code for policy predicates
+- Enable attestation consumers to verify unknown handling
+
+**Working directory:** `src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0001.0001 (Reason-Coded Unknowns), Sprint 4100.0001.0002 (Unknown Budgets) — MUST BE DONE
+- **Downstream**: Sprint 4100.0003.0001 (Risk Verdict Attestation)
+- **Safe to parallelize with**: Sprint 4100.0002.0003, Sprint 4100.0004.0001
+
+## Documentation Prerequisites
+
+- Sprint 4100.0001.0001 completion (UnknownReasonCode enum)
+- Sprint 4100.0001.0002 completion (UnknownBudget model)
+- `src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/AGENTS.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #5.md`
+
+---
+
+## Tasks
+
+### T1: Define UnknownsSummary Model
+
+**Assignee**: Attestor Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create a model for aggregated unknowns data to include in attestations.
+
+**Implementation Path**: `Models/UnknownsSummary.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Attestor.ProofChain.Models;
+
+/// <summary>
+/// Aggregated summary of unknowns for inclusion in attestations.
+/// Provides verifiable data about unknown risk handled during evaluation.
+/// </summary>
+public sealed record UnknownsSummary
+{
+ /// <summary>
+ /// Total count of unknowns encountered.
+ /// </summary>
+ public int Total { get; init; }
+
+ /// <summary>
+ /// Count of unknowns by reason code.
+ /// </summary>
+ public IReadOnlyDictionary<string, int> ByReasonCode { get; init; }
+ = new Dictionary<string, int>();
+
+ ///
+ /// Count of unknowns that would block if not excepted.
+ ///
+ public int BlockingCount { get; init; }
+
+ ///
+ /// Count of unknowns that are covered by approved exceptions.
+ ///
+ public int ExceptedCount { get; init; }
+
+ /// <summary>
+ /// Policy thresholds that were evaluated.
+ /// </summary>
+ public IReadOnlyList<string> PolicyThresholdsApplied { get; init; } = [];
+
+ /// <summary>
+ /// Exception IDs that were applied to cover unknowns.
+ /// </summary>
+ public IReadOnlyList<string> ExceptionsApplied { get; init; } = [];
+
+ ///
+ /// Hash of the unknowns list for integrity verification.
+ ///
+ public string? UnknownsDigest { get; init; }
+
+ ///
+ /// Creates an empty summary for cases with no unknowns.
+ ///
+ public static UnknownsSummary Empty { get; } = new()
+ {
+ Total = 0,
+ ByReasonCode = new Dictionary<string, int>(),
+ BlockingCount = 0,
+ ExceptedCount = 0
+ };
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownsSummary.cs` file created in `Models/` directory
+- [ ] Total and per-reason-code counts included
+- [ ] Blocking and excepted counts tracked
+- [ ] Policy thresholds and exception IDs recorded
+- [ ] Digest field for integrity verification
+- [ ] Static `Empty` instance for convenience
+
+---
+
+### T2: Extend VerdictReceiptPayload
+
+**Assignee**: Attestor Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Add unknowns summary field to the verdict receipt statement payload.
+
+**Implementation Path**: `Statements/VerdictReceiptStatement.cs`
+
+**Updated Payload**:
+```csharp
+///
+/// Payload for verdict receipt attestation statement.
+///
+public sealed record VerdictReceiptPayload
+{
+ // Existing fields
+ public required string VerdictId { get; init; }
+ public required string ArtifactDigest { get; init; }
+ public required string PolicyRef { get; init; }
+ public required VerdictStatus Status { get; init; }
+ public required DateTimeOffset EvaluatedAt { get; init; }
+ public IReadOnlyList Findings { get; init; } = [];
+ public IReadOnlyList AppliedExceptions { get; init; } = [];
+
+ // NEW: Unknowns summary
+ ///
+ /// Summary of unknowns encountered during evaluation.
+ /// Included for transparency about uncertainty in the verdict.
+ ///
+ public UnknownsSummary? Unknowns { get; init; }
+
+ // NEW: Knowledge snapshot reference
+ ///
+ /// Reference to the knowledge snapshot used for evaluation.
+ /// Enables replay and verification of inputs.
+ ///
+ public string? KnowledgeSnapshotId { get; init; }
+}
+```
+
+**JSON Schema Update**:
+```json
+{
+ "type": "object",
+ "properties": {
+ "verdictId": { "type": "string" },
+ "artifactDigest": { "type": "string" },
+ "unknowns": {
+ "type": "object",
+ "properties": {
+ "total": { "type": "integer" },
+ "byReasonCode": {
+ "type": "object",
+ "additionalProperties": { "type": "integer" }
+ },
+ "blockingCount": { "type": "integer" },
+ "exceptedCount": { "type": "integer" },
+ "policyThresholdsApplied": {
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ "exceptionsApplied": {
+ "type": "array",
+ "items": { "type": "string" }
+ },
+ "unknownsDigest": { "type": "string" }
+ }
+ },
+ "knowledgeSnapshotId": { "type": "string" }
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `Unknowns` field added to `VerdictReceiptPayload`
+- [ ] `KnowledgeSnapshotId` field added for replay support
+- [ ] JSON schema updated with unknowns structure
+- [ ] Field is nullable for backward compatibility
+- [ ] Existing attestation tests still pass
+
+---
+
+### T3: Create UnknownsAggregator
+
+**Assignee**: Attestor Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement service to aggregate unknowns into summary format for attestations.
+
+**Implementation Path**: `Services/UnknownsAggregator.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Attestor.ProofChain.Services;
+
+///
+/// Aggregates unknowns data into summary format for attestations.
+///
+public sealed class UnknownsAggregator : IUnknownsAggregator
+{
+ private readonly IHasher _hasher;
+
+ public UnknownsAggregator(IHasher hasher)
+ {
+ _hasher = hasher;
+ }
+
+ ///
+ /// Creates an unknowns summary from evaluation results.
+ ///
+ public UnknownsSummary Aggregate(
+ IReadOnlyList<UnknownItem> unknowns,
+ BudgetCheckResult? budgetResult = null,
+ IReadOnlyList<ExceptionRef>? exceptions = null)
+ {
+ if (unknowns.Count == 0)
+ return UnknownsSummary.Empty;
+
+ // Count by reason code
+ var byReasonCode = unknowns
+ .GroupBy(u => u.ReasonCode.ToString())
+ .ToDictionary(g => g.Key, g => g.Count());
+
+ // Calculate blocking count (would block without exceptions)
+ var blockingCount = budgetResult?.Violations.Values.Sum(v => v.Count) ?? 0;
+
+ // Calculate excepted count
+ var exceptedCount = exceptions?.Count ?? 0;
+
+ // Compute digest of unknowns list for integrity
+ var unknownsDigest = ComputeUnknownsDigest(unknowns);
+
+ // Extract policy thresholds that were checked
+ var thresholds = budgetResult?.Violations.Keys
+ .Select(k => $"{k}:{budgetResult.Violations[k].Limit}")
+ .ToList() ?? [];
+
+ // Extract applied exception IDs
+ var exceptionIds = exceptions?
+ .Select(e => e.ExceptionId)
+ .ToList() ?? [];
+
+ return new UnknownsSummary
+ {
+ Total = unknowns.Count,
+ ByReasonCode = byReasonCode,
+ BlockingCount = blockingCount,
+ ExceptedCount = exceptedCount,
+ PolicyThresholdsApplied = thresholds,
+ ExceptionsApplied = exceptionIds,
+ UnknownsDigest = unknownsDigest
+ };
+ }
+
+ ///
+ /// Computes a deterministic digest of the unknowns list.
+ ///
+ private string ComputeUnknownsDigest(IReadOnlyList<UnknownItem> unknowns)
+ {
+ // Sort for determinism
+ var sorted = unknowns
+ .OrderBy(u => u.PackageUrl)
+ .ThenBy(u => u.CveId)
+ .ThenBy(u => u.ReasonCode.ToString())
+ .ToList();
+
+ // Serialize to canonical JSON
+ var json = JsonSerializer.Serialize(sorted, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ WriteIndented = false
+ });
+
+ // Hash the serialized data
+ return _hasher.ComputeSha256(json);
+ }
+}
+
+///
+/// Input item for unknowns aggregation.
+///
+public sealed record UnknownItem(
+ string PackageUrl,
+ string? CveId,
+ string ReasonCode,
+ string? RemediationHint);
+
+///
+/// Reference to an applied exception.
+///
+public sealed record ExceptionRef(
+ string ExceptionId,
+ string Status,
+ IReadOnlyList<string> CoveredReasonCodes);
+
+public interface IUnknownsAggregator
+{
+ UnknownsSummary Aggregate(
+ IReadOnlyList<UnknownItem> unknowns,
+ BudgetCheckResult? budgetResult = null,
+ IReadOnlyList<ExceptionRef>? exceptions = null);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `UnknownsAggregator.cs` created in `Services/`
+- [ ] Aggregates unknowns by reason code
+- [ ] Computes blocking and excepted counts
+- [ ] Generates deterministic digest of unknowns
+- [ ] Records policy thresholds and exception IDs
+- [ ] Interface defined for DI
+
+---
+
+### T4: Update PolicyDecisionPredicate
+
+**Assignee**: Attestor Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Include unknowns data in the policy decision predicate for attestation verification.
+
+**Implementation Path**: `Predicates/PolicyDecisionPredicate.cs`
+
+**Updated Predicate**:
+```csharp
+namespace StellaOps.Attestor.ProofChain.Predicates;
+
+///
+/// Predicate type for policy decision attestations.
+///
+public sealed record PolicyDecisionPredicate
+{
+ public const string PredicateType = "https://stella.ops/predicates/policy-decision@v2";
+
+ // Existing fields
+ public required string PolicyRef { get; init; }
+ public required PolicyDecision Decision { get; init; }
+ public required DateTimeOffset EvaluatedAt { get; init; }
+ public IReadOnlyList Findings { get; init; } = [];
+
+ // NEW: Unknowns handling
+ ///
+ /// Summary of unknowns and how they were handled.
+ ///
+ public UnknownsSummary? Unknowns { get; init; }
+
+ ///
+ /// Whether unknowns were a factor in the decision.
+ ///
+ public bool UnknownsAffectedDecision { get; init; }
+
+ ///
+ /// Reason codes that caused blocking (if any).
+ ///
+ public IReadOnlyList<string> BlockingReasonCodes { get; init; } = [];
+
+ // NEW: Knowledge snapshot reference
+ ///
+ /// Content-addressed ID of the knowledge snapshot used.
+ ///
+ public string? KnowledgeSnapshotId { get; init; }
+}
+
+///
+/// Policy decision outcome.
+///
+public enum PolicyDecision
+{
+ Pass,
+ Fail,
+ PassWithExceptions,
+ Indeterminate
+}
+```
+
+**Predicate Builder Update**:
+```csharp
+public PolicyDecisionPredicate Build(PolicyEvaluationResult result)
+{
+ var unknownsAffected = result.UnknownBudgetStatus?.IsExceeded == true ||
+ result.FailureReason == PolicyFailureReason.UnknownBudgetExceeded;
+
+ var blockingCodes = result.UnknownBudgetStatus?.Violations.Keys
+ .Select(k => k.ToString())
+ .ToList() ?? [];
+
+ return new PolicyDecisionPredicate
+ {
+ PolicyRef = result.PolicyRef,
+ Decision = MapDecision(result),
+ EvaluatedAt = result.EvaluatedAt,
+ Findings = result.Findings.Select(MapFinding).ToList(),
+ Unknowns = _aggregator.Aggregate(result.Unknowns, result.UnknownBudgetStatus),
+ UnknownsAffectedDecision = unknownsAffected,
+ BlockingReasonCodes = blockingCodes,
+ KnowledgeSnapshotId = result.KnowledgeSnapshotId
+ };
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Predicate version bumped to v2
+- [ ] `Unknowns` field added with summary
+- [ ] `UnknownsAffectedDecision` boolean flag
+- [ ] `BlockingReasonCodes` list for failed verdicts
+- [ ] `KnowledgeSnapshotId` for replay support
+- [ ] Predicate builder uses aggregator
+
+---
+
+### T5: Add Attestation Tests
+
+**Assignee**: Attestor Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T4
+
+**Description**:
+Add tests verifying unknowns are correctly included in signed attestations.
+
+**Implementation Path**: `src/Attestor/__Tests/StellaOps.Attestor.ProofChain.Tests/`
+
+**Test Cases**:
+```csharp
+public class UnknownsSummaryTests
+{
+ [Fact]
+ public void Empty_ReturnsZeroCounts()
+ {
+ var summary = UnknownsSummary.Empty;
+
+ summary.Total.Should().Be(0);
+ summary.ByReasonCode.Should().BeEmpty();
+ summary.BlockingCount.Should().Be(0);
+ }
+}
+
+public class UnknownsAggregatorTests
+{
+ [Fact]
+ public void Aggregate_GroupsByReasonCode()
+ {
+ var unknowns = new[]
+ {
+ new UnknownItem("pkg:npm/foo@1.0", null, "Reachability", null),
+ new UnknownItem("pkg:npm/bar@1.0", null, "Reachability", null),
+ new UnknownItem("pkg:npm/baz@1.0", null, "Identity", null)
+ };
+
+ var summary = _aggregator.Aggregate(unknowns);
+
+ summary.Total.Should().Be(3);
+ summary.ByReasonCode["Reachability"].Should().Be(2);
+ summary.ByReasonCode["Identity"].Should().Be(1);
+ }
+
+ [Fact]
+ public void Aggregate_ComputesDeterministicDigest()
+ {
+ var unknowns = CreateUnknowns();
+
+ var summary1 = _aggregator.Aggregate(unknowns);
+ var summary2 = _aggregator.Aggregate(unknowns.Reverse().ToList());
+
+ summary1.UnknownsDigest.Should().Be(summary2.UnknownsDigest);
+ }
+
+ [Fact]
+ public void Aggregate_IncludesExceptionIds()
+ {
+ var unknowns = CreateUnknowns();
+ var exceptions = new[]
+ {
+ new ExceptionRef("EXC-001", "Approved", new[] { "Reachability" })
+ };
+
+ var summary = _aggregator.Aggregate(unknowns, null, exceptions);
+
+ summary.ExceptionsApplied.Should().Contain("EXC-001");
+ summary.ExceptedCount.Should().Be(1);
+ }
+}
+
+public class VerdictReceiptStatementTests
+{
+ [Fact]
+ public void CreateStatement_IncludesUnknownsSummary()
+ {
+ var result = CreateEvaluationResult(unknownsCount: 5);
+
+ var statement = _builder.Build(result);
+
+ statement.Predicate.Unknowns.Should().NotBeNull();
+ statement.Predicate.Unknowns.Total.Should().Be(5);
+ }
+
+ [Fact]
+ public void CreateStatement_SignatureCoversUnknowns()
+ {
+ var result = CreateEvaluationResult(unknownsCount: 5);
+
+ var envelope = _signer.SignStatement(result);
+
+ // Modify unknowns and verify signature fails
+ var tampered = envelope with
+ {
+ Payload = ModifyUnknownsCount(envelope.Payload, 0)
+ };
+
+ _verifier.Verify(tampered).Should().BeFalse();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Test for empty summary creation
+- [ ] Test for reason code grouping
+- [ ] Test for deterministic digest computation
+- [ ] Test for exception ID inclusion
+- [ ] Test for unknowns in statement payload
+- [ ] Test that signature covers unknowns data
+- [ ] All 6+ tests pass
+
+---
+
+### T6: Update Predicate Schema
+
+**Assignee**: Attestor Team
+**Story Points**: 1
+**Status**: TODO
+**Dependencies**: T4
+
+**Description**:
+Update the JSON schema documentation for the policy decision predicate.
+
+**Implementation Path**: `docs/api/predicates/policy-decision-v2.schema.json`
+
+**Schema Documentation**:
+```json
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://stella.ops/predicates/policy-decision@v2",
+ "title": "Policy Decision Predicate v2",
+ "description": "Attestation predicate for policy evaluation decisions, including unknowns handling.",
+ "type": "object",
+ "required": ["policyRef", "decision", "evaluatedAt"],
+ "properties": {
+ "policyRef": {
+ "type": "string",
+ "description": "Reference to the policy that was evaluated"
+ },
+ "decision": {
+ "type": "string",
+ "enum": ["Pass", "Fail", "PassWithExceptions", "Indeterminate"],
+ "description": "Final policy decision"
+ },
+ "evaluatedAt": {
+ "type": "string",
+ "format": "date-time",
+ "description": "ISO-8601 timestamp of evaluation"
+ },
+ "unknowns": {
+ "type": "object",
+ "description": "Summary of unknowns encountered during evaluation",
+ "properties": {
+ "total": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "Total count of unknowns"
+ },
+ "byReasonCode": {
+ "type": "object",
+ "additionalProperties": { "type": "integer" },
+ "description": "Count per reason code (Reachability, Identity, etc.)"
+ },
+ "blockingCount": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "Count that would block without exceptions"
+ },
+ "exceptedCount": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "Count covered by approved exceptions"
+ },
+ "unknownsDigest": {
+ "type": "string",
+ "description": "SHA-256 digest of unknowns list"
+ }
+ }
+ },
+ "unknownsAffectedDecision": {
+ "type": "boolean",
+ "description": "Whether unknowns influenced the decision"
+ },
+ "blockingReasonCodes": {
+ "type": "array",
+ "items": { "type": "string" },
+ "description": "Reason codes that caused blocking"
+ },
+ "knowledgeSnapshotId": {
+ "type": "string",
+ "description": "Content-addressed ID of knowledge snapshot"
+ }
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Schema file created at `docs/api/predicates/`
+- [ ] All new fields documented
+- [ ] Schema validates against sample payloads
+- [ ] Version bump to v2 documented
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Attestor Team | Define UnknownsSummary model |
+| 2 | T2 | TODO | T1 | Attestor Team | Extend VerdictReceiptPayload |
+| 3 | T3 | TODO | T1 | Attestor Team | Create UnknownsAggregator |
+| 4 | T4 | TODO | T2, T3 | Attestor Team | Update PolicyDecisionPredicate |
+| 5 | T5 | TODO | T4 | Attestor Team | Add attestation tests |
+| 6 | T6 | TODO | T4 | Attestor Team | Update predicate schema |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Unknowns in attestations identified as requirement from Moat #5 advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Predicate version bump | Decision | Attestor Team | v1 → v2 for backward compatibility tracking |
+| Deterministic digest | Decision | Attestor Team | Enables tamper detection of unknowns list |
+| String reason codes | Decision | Attestor Team | Using strings instead of enums for JSON flexibility |
+| Nullable unknowns | Decision | Attestor Team | Allows backward compatibility with v1 payloads |
+
+---
+
+## Success Criteria
+
+- [ ] All 6 tasks marked DONE
+- [ ] Unknowns summary included in attestations
+- [ ] Predicate schema v2 documented
+- [ ] Aggregator computes deterministic digests
+- [ ] 6+ attestation tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0002_0001_knowledge_snapshot_manifest.md b/docs/implplan/SPRINT_4100_0002_0001_knowledge_snapshot_manifest.md
new file mode 100644
index 000000000..703d0cc77
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0002_0001_knowledge_snapshot_manifest.md
@@ -0,0 +1,949 @@
+# Sprint 4100.0002.0001 · Knowledge Snapshot Manifest
+
+## Topic & Scope
+
+- Define unified content-addressed manifest for knowledge snapshots
+- Enable deterministic capture of all evaluation inputs
+- Support time-travel replay by freezing knowledge state
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy/Snapshots/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: None (first sprint in batch)
+- **Downstream**: Sprint 4100.0002.0002 (Replay Engine), Sprint 4100.0002.0003 (Snapshot Export/Import), Sprint 4100.0004.0001 (Security State Delta)
+- **Safe to parallelize with**: Sprint 4100.0001.0001, Sprint 4100.0003.0001, Sprint 4100.0004.0002
+
+## Documentation Prerequisites
+
+- `src/Policy/__Libraries/StellaOps.Policy/AGENTS.md`
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Knowledge Snapshots and Time‑Travel Replay.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #2.md` (Risk Verdict Attestation)
+
+---
+
+## Tasks
+
+### T1: Define KnowledgeSnapshotManifest
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create the unified manifest structure for knowledge snapshots.
+
+**Implementation Path**: `Snapshots/KnowledgeSnapshotManifest.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Snapshots;
+
+///
+/// Unified manifest for a knowledge snapshot.
+/// Content-addressed bundle capturing all inputs to a policy evaluation.
+///
+public sealed record KnowledgeSnapshotManifest
+{
+ ///
+ /// Content-addressed snapshot ID: ksm:sha256:{hash}
+ ///
+ public required string SnapshotId { get; init; }
+
+ ///
+ /// When this snapshot was created (UTC).
+ ///
+ public required DateTimeOffset CreatedAt { get; init; }
+
+ ///
+ /// Engine version that created this snapshot.
+ ///
+ public required EngineInfo Engine { get; init; }
+
+ ///
+ /// Plugins/analyzers active during snapshot creation.
+ ///
+ public IReadOnlyList<PluginInfo> Plugins { get; init; } = [];
+
+ ///
+ /// Reference to the policy bundle used.
+ ///
+ public required PolicyBundleRef Policy { get; init; }
+
+ ///
+ /// Reference to the scoring rules used.
+ ///
+ public required ScoringRulesRef Scoring { get; init; }
+
+ ///
+ /// Reference to the trust bundle (root certificates, VEX publishers).
+ ///
+ public TrustBundleRef? Trust { get; init; }
+
+ ///
+ /// Knowledge sources included in this snapshot.
+ ///
+ public required IReadOnlyList<KnowledgeSourceDescriptor> Sources { get; init; }
+
+ ///
+ /// Determinism profile for environment reproducibility.
+ ///
+ public DeterminismProfile? Environment { get; init; }
+
+ ///
+ /// Optional DSSE signature over the manifest.
+ ///
+ public string? Signature { get; init; }
+
+ ///
+ /// Manifest format version.
+ ///
+ public string ManifestVersion { get; init; } = "1.0";
+}
+
+///
+/// Engine version information.
+///
+public sealed record EngineInfo(
+ string Name,
+ string Version,
+ string Commit);
+
+///
+/// Plugin/analyzer information.
+///
+public sealed record PluginInfo(
+ string Name,
+ string Version,
+ string Type);
+
+///
+/// Reference to a policy bundle.
+///
+public sealed record PolicyBundleRef(
+ string PolicyId,
+ string Digest,
+ string? Uri);
+
+///
+/// Reference to scoring rules.
+///
+public sealed record ScoringRulesRef(
+ string RulesId,
+ string Digest,
+ string? Uri);
+
+///
+/// Reference to trust bundle.
+///
+public sealed record TrustBundleRef(
+ string BundleId,
+ string Digest,
+ string? Uri);
+
+///
+/// Determinism profile for environment capture.
+///
+public sealed record DeterminismProfile(
+ string TimezoneOffset,
+ string Locale,
+ string Platform,
+ IReadOnlyDictionary<string, string> EnvironmentVars);
+```
+
+**Acceptance Criteria**:
+- [ ] `KnowledgeSnapshotManifest.cs` file created in `Snapshots/` directory
+- [ ] All component records defined (EngineInfo, PluginInfo, etc.)
+- [ ] SnapshotId uses content-addressed format `ksm:sha256:{hash}`
+- [ ] Manifest is immutable (all init-only properties)
+- [ ] XML documentation on all types
+
+---
+
+### T2: Define KnowledgeSourceDescriptor
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create a model describing each knowledge source in the snapshot.
+
+**Implementation Path**: `Snapshots/KnowledgeSourceDescriptor.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Snapshots;
+
+///
+/// Descriptor for a knowledge source included in a snapshot.
+///
+public sealed record KnowledgeSourceDescriptor
+{
+ ///
+ /// Unique name of the source (e.g., "nvd", "osv", "vendor-vex").
+ ///
+ public required string Name { get; init; }
+
+ ///
+ /// Type of source: "advisory-feed", "vex", "sbom", "reachability", "policy".
+ ///
+ public required string Type { get; init; }
+
+ ///
+ /// Epoch or version of the source data.
+ ///
+ public required string Epoch { get; init; }
+
+ ///
+ /// Content digest of the source data.
+ ///
+ public required string Digest { get; init; }
+
+ ///
+ /// Origin URI where this source was fetched from.
+ ///
+ public string? Origin { get; init; }
+
+ ///
+ /// When this source was last updated.
+ ///
+ public DateTimeOffset? LastUpdatedAt { get; init; }
+
+ ///
+ /// Record count or entry count in this source.
+ ///
+ public int? RecordCount { get; init; }
+
+ ///
+ /// Whether this source is bundled (embedded) or referenced.
+ ///
+ public SourceInclusionMode InclusionMode { get; init; } = SourceInclusionMode.Referenced;
+
+ ///
+ /// Relative path within the snapshot bundle (if bundled).
+ ///
+ public string? BundlePath { get; init; }
+}
+
+///
+/// How a source is included in the snapshot.
+///
+public enum SourceInclusionMode
+{
+ ///
+ /// Source is referenced by digest only (requires external fetch for replay).
+ ///
+ Referenced,
+
+ ///
+ /// Source content is embedded in the snapshot bundle.
+ ///
+ Bundled,
+
+ ///
+ /// Source is bundled and compressed.
+ ///
+ BundledCompressed
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `KnowledgeSourceDescriptor.cs` file created
+- [ ] Source types defined: advisory-feed, vex, sbom, reachability, policy
+- [ ] Inclusion modes defined: Referenced, Bundled, BundledCompressed
+- [ ] Digest and epoch for content addressing
+- [ ] Optional bundle path for embedded sources
+
+---
+
+### T3: Create SnapshotBuilder
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Implement a fluent API for constructing snapshot manifests.
+
+**Implementation Path**: `Snapshots/SnapshotBuilder.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Snapshots;
+
+///
+/// Fluent builder for constructing knowledge snapshot manifests.
+///
+public sealed class SnapshotBuilder
+{
+ private readonly List<KnowledgeSourceDescriptor> _sources = [];
+ private readonly List<PluginInfo> _plugins = [];
+ private EngineInfo? _engine;
+ private PolicyBundleRef? _policy;
+ private ScoringRulesRef? _scoring;
+ private TrustBundleRef? _trust;
+ private DeterminismProfile? _environment;
+ private readonly IHasher _hasher;
+
+ public SnapshotBuilder(IHasher hasher)
+ {
+ _hasher = hasher;
+ }
+
+ public SnapshotBuilder WithEngine(string name, string version, string commit)
+ {
+ _engine = new EngineInfo(name, version, commit);
+ return this;
+ }
+
+ public SnapshotBuilder WithPlugin(string name, string version, string type)
+ {
+ _plugins.Add(new PluginInfo(name, version, type));
+ return this;
+ }
+
+ public SnapshotBuilder WithPolicy(string policyId, string digest, string? uri = null)
+ {
+ _policy = new PolicyBundleRef(policyId, digest, uri);
+ return this;
+ }
+
+ public SnapshotBuilder WithScoring(string rulesId, string digest, string? uri = null)
+ {
+ _scoring = new ScoringRulesRef(rulesId, digest, uri);
+ return this;
+ }
+
+ public SnapshotBuilder WithTrust(string bundleId, string digest, string? uri = null)
+ {
+ _trust = new TrustBundleRef(bundleId, digest, uri);
+ return this;
+ }
+
+ public SnapshotBuilder WithSource(KnowledgeSourceDescriptor source)
+ {
+ _sources.Add(source);
+ return this;
+ }
+
+ public SnapshotBuilder WithAdvisoryFeed(
+ string name, string epoch, string digest, string? origin = null)
+ {
+ _sources.Add(new KnowledgeSourceDescriptor
+ {
+ Name = name,
+ Type = "advisory-feed",
+ Epoch = epoch,
+ Digest = digest,
+ Origin = origin
+ });
+ return this;
+ }
+
+ public SnapshotBuilder WithVex(string name, string digest, string? origin = null)
+ {
+ _sources.Add(new KnowledgeSourceDescriptor
+ {
+ Name = name,
+ Type = "vex",
+ Epoch = DateTimeOffset.UtcNow.ToString("o"),
+ Digest = digest,
+ Origin = origin
+ });
+ return this;
+ }
+
+ public SnapshotBuilder WithEnvironment(DeterminismProfile environment)
+ {
+ _environment = environment;
+ return this;
+ }
+
+ public SnapshotBuilder CaptureCurrentEnvironment()
+ {
+ _environment = new DeterminismProfile(
+ TimezoneOffset: TimeZoneInfo.Local.BaseUtcOffset.ToString(),
+ Locale: CultureInfo.CurrentCulture.Name,
+ Platform: Environment.OSVersion.ToString(),
+ EnvironmentVars: new Dictionary<string, string>());
+ return this;
+ }
+
+ ///
+ /// Builds the manifest and computes the content-addressed ID.
+ ///
+ public KnowledgeSnapshotManifest Build()
+ {
+ if (_engine is null)
+ throw new InvalidOperationException("Engine info is required");
+ if (_policy is null)
+ throw new InvalidOperationException("Policy reference is required");
+ if (_scoring is null)
+ throw new InvalidOperationException("Scoring reference is required");
+ if (_sources.Count == 0)
+ throw new InvalidOperationException("At least one source is required");
+
+ // Create manifest without ID first
+ var manifest = new KnowledgeSnapshotManifest
+ {
+ SnapshotId = "", // Placeholder
+ CreatedAt = DateTimeOffset.UtcNow,
+ Engine = _engine,
+ Plugins = _plugins.ToList(),
+ Policy = _policy,
+ Scoring = _scoring,
+ Trust = _trust,
+ Sources = _sources.OrderBy(s => s.Name).ToList(),
+ Environment = _environment
+ };
+
+ // Compute content-addressed ID
+ var snapshotId = ComputeSnapshotId(manifest);
+
+ return manifest with { SnapshotId = snapshotId };
+ }
+
+ private string ComputeSnapshotId(KnowledgeSnapshotManifest manifest)
+ {
+ // Serialize to canonical JSON (sorted keys, no whitespace)
+ var json = JsonSerializer.Serialize(manifest with { SnapshotId = "" },
+ new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ WriteIndented = false,
+ DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
+ });
+
+ var hash = _hasher.ComputeSha256(json);
+ return $"ksm:sha256:{hash}";
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SnapshotBuilder.cs` created in `Snapshots/`
+- [ ] Fluent API for all manifest components
+- [ ] Validation on Build() for required fields
+- [ ] Content-addressed ID computed from manifest hash
+- [ ] Sources sorted for determinism
+- [ ] Environment capture helper method
+
+---
+
+### T4: Implement Content-Addressed ID
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T3
+
+**Description**:
+Ensure snapshot ID is deterministically computed from manifest content.
+
+**Implementation Path**: `Snapshots/SnapshotIdGenerator.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Snapshots;
+
+///
+/// Generates and validates content-addressed snapshot IDs.
+///
+public sealed class SnapshotIdGenerator : ISnapshotIdGenerator
+{
+ private const string Prefix = "ksm:sha256:";
+ private readonly IHasher _hasher;
+
+ public SnapshotIdGenerator(IHasher hasher)
+ {
+ _hasher = hasher;
+ }
+
+ ///
+ /// Generates a content-addressed ID for a manifest.
+ ///
+ public string GenerateId(KnowledgeSnapshotManifest manifest)
+ {
+ var canonicalJson = ToCanonicalJson(manifest with { SnapshotId = "", Signature = null });
+ var hash = _hasher.ComputeSha256(canonicalJson);
+ return $"{Prefix}{hash}";
+ }
+
+ ///
+ /// Validates that a manifest's ID matches its content.
+ ///
+ public bool ValidateId(KnowledgeSnapshotManifest manifest)
+ {
+ var expectedId = GenerateId(manifest);
+ return manifest.SnapshotId == expectedId;
+ }
+
+ ///
+ /// Parses a snapshot ID into its components.
+ ///
+ public SnapshotIdComponents? ParseId(string snapshotId)
+ {
+ if (!snapshotId.StartsWith(Prefix))
+ return null;
+
+ var hash = snapshotId[Prefix.Length..];
+ if (hash.Length != 64) // SHA-256 hex length
+ return null;
+
+ return new SnapshotIdComponents("sha256", hash);
+ }
+
+ private static string ToCanonicalJson(KnowledgeSnapshotManifest manifest)
+ {
+ return JsonSerializer.Serialize(manifest, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ WriteIndented = false,
+ DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
+ Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
+ });
+ }
+}
+
+public sealed record SnapshotIdComponents(string Algorithm, string Hash);
+
+public interface ISnapshotIdGenerator
+{
+ string GenerateId(KnowledgeSnapshotManifest manifest);
+ bool ValidateId(KnowledgeSnapshotManifest manifest);
+ SnapshotIdComponents? ParseId(string snapshotId);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SnapshotIdGenerator.cs` created
+- [ ] ID format: `ksm:sha256:{64-char-hex}`
+- [ ] ID excludes signature field from hash
+- [ ] Validation method confirms ID matches content
+- [ ] Parse method extracts algorithm and hash
+- [ ] Interface defined for DI
+
+---
+
+### T5: Create SnapshotService
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3, T4
+
+**Description**:
+Implement service for creating, sealing, and verifying snapshots.
+
+**Implementation Path**: `Snapshots/SnapshotService.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Snapshots;
+
+///
+/// Service for managing knowledge snapshots.
+///
+public sealed class SnapshotService : ISnapshotService
+{
+ private readonly ISnapshotIdGenerator _idGenerator;
+ private readonly ISigner _signer;
+ private readonly ISnapshotStore _store;
+ private readonly ILogger<SnapshotService> _logger;
+
+ public SnapshotService(
+ ISnapshotIdGenerator idGenerator,
+ ISigner signer,
+ ISnapshotStore store,
+ ILogger<SnapshotService> logger)
+ {
+ _idGenerator = idGenerator;
+ _signer = signer;
+ _store = store;
+ _logger = logger;
+ }
+
+ ///
+ /// Creates and persists a new snapshot.
+ ///
+ public async Task<KnowledgeSnapshotManifest> CreateSnapshotAsync(
+ SnapshotBuilder builder,
+ CancellationToken ct = default)
+ {
+ var manifest = builder.Build();
+
+ // Validate ID before storing
+ if (!_idGenerator.ValidateId(manifest))
+ throw new InvalidOperationException("Snapshot ID validation failed");
+
+ await _store.SaveAsync(manifest, ct);
+
+ _logger.LogInformation("Created snapshot {SnapshotId}", manifest.SnapshotId);
+
+ return manifest;
+ }
+
+ ///
+ /// Seals a snapshot with a DSSE signature.
+ ///
+ public async Task<KnowledgeSnapshotManifest> SealSnapshotAsync(
+ KnowledgeSnapshotManifest manifest,
+ CancellationToken ct = default)
+ {
+ var payload = JsonSerializer.SerializeToUtf8Bytes(manifest with { Signature = null });
+ var signature = await _signer.SignAsync(payload, ct);
+
+ var sealedManifest = manifest with { Signature = signature };
+
+ await _store.SaveAsync(sealedManifest, ct);
+
+ _logger.LogInformation("Sealed snapshot {SnapshotId}", manifest.SnapshotId);
+
+ return sealedManifest;
+ }
+
+ ///
+ /// Verifies a snapshot's integrity and signature.
+ ///
+ public async Task<SnapshotVerificationResult> VerifySnapshotAsync(
+ KnowledgeSnapshotManifest manifest,
+ CancellationToken ct = default)
+ {
+ // Verify content-addressed ID
+ if (!_idGenerator.ValidateId(manifest))
+ {
+ return SnapshotVerificationResult.Fail("Snapshot ID does not match content");
+ }
+
+ // Verify signature if present
+ if (manifest.Signature is not null)
+ {
+ var payload = JsonSerializer.SerializeToUtf8Bytes(manifest with { Signature = null });
+ var sigValid = await _signer.VerifyAsync(payload, manifest.Signature, ct);
+
+ if (!sigValid)
+ {
+ return SnapshotVerificationResult.Fail("Signature verification failed");
+ }
+ }
+
+ return SnapshotVerificationResult.Success();
+ }
+
+ ///
+ /// Retrieves a snapshot by ID.
+ ///
+ public async Task<KnowledgeSnapshotManifest?> GetSnapshotAsync(
+ string snapshotId,
+ CancellationToken ct = default)
+ {
+ return await _store.GetAsync(snapshotId, ct);
+ }
+}
+
+public sealed record SnapshotVerificationResult(bool IsValid, string? Error)
+{
+ public static SnapshotVerificationResult Success() => new(true, null);
+ public static SnapshotVerificationResult Fail(string error) => new(false, error);
+}
+
+public interface ISnapshotService
+{
+ Task<KnowledgeSnapshotManifest> CreateSnapshotAsync(SnapshotBuilder builder, CancellationToken ct = default);
+ Task<KnowledgeSnapshotManifest> SealSnapshotAsync(KnowledgeSnapshotManifest manifest, CancellationToken ct = default);
+ Task<SnapshotVerificationResult> VerifySnapshotAsync(KnowledgeSnapshotManifest manifest, CancellationToken ct = default);
+ Task<KnowledgeSnapshotManifest?> GetSnapshotAsync(string snapshotId, CancellationToken ct = default);
+}
+
+public interface ISnapshotStore
+{
+ Task SaveAsync(KnowledgeSnapshotManifest manifest, CancellationToken ct = default);
+ Task<KnowledgeSnapshotManifest?> GetAsync(string snapshotId, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SnapshotService.cs` created in `Snapshots/`
+- [ ] Create, seal, verify, and get operations
+- [ ] Sealing adds DSSE signature
+- [ ] Verification checks ID and signature
+- [ ] Store interface for persistence abstraction
+- [ ] Logging for observability
+
+---
+
+### T6: Integrate with PolicyEvaluator
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T5
+
+**Description**:
+Bind policy evaluation to a knowledge snapshot for reproducibility.
+
+**Implementation Path**: `src/Policy/StellaOps.Policy.Engine/Services/PolicyEvaluator.cs`
+
+**Integration**:
+```csharp
+public sealed class PolicyEvaluator
+{
+ private readonly ISnapshotService _snapshotService;
+
+ ///
+ /// Evaluates policy with an explicit knowledge snapshot.
+ ///
+ public async Task<PolicyEvaluationResult> EvaluateWithSnapshotAsync(
+ PolicyEvaluationRequest request,
+ KnowledgeSnapshotManifest snapshot,
+ CancellationToken ct = default)
+ {
+ // Verify snapshot before use
+ var verification = await _snapshotService.VerifySnapshotAsync(snapshot, ct);
+ if (!verification.IsValid)
+ {
+ return PolicyEvaluationResult.Fail(
+ PolicyFailureReason.InvalidSnapshot,
+ verification.Error);
+ }
+
+ // Bind evaluation to snapshot sources
+ var context = await CreateEvaluationContext(request, snapshot, ct);
+
+ // Perform evaluation with frozen inputs
+ var result = await EvaluateInternalAsync(context, ct);
+
+ // Include snapshot reference in result
+ return result with
+ {
+ KnowledgeSnapshotId = snapshot.SnapshotId,
+ SnapshotCreatedAt = snapshot.CreatedAt
+ };
+ }
+
+ ///
+ /// Creates a snapshot capturing current knowledge state.
+ ///
+ public async Task<KnowledgeSnapshotManifest> CaptureCurrentSnapshotAsync(
+ CancellationToken ct = default)
+ {
+ var builder = new SnapshotBuilder(_hasher)
+ .WithEngine("StellaOps.Policy", _version, _commit)
+ .WithPolicy(_policyRef.Id, _policyRef.Digest)
+ .WithScoring(_scoringRef.Id, _scoringRef.Digest);
+
+ // Add all active knowledge sources
+ foreach (var source in await _knowledgeSourceProvider.GetActiveSourcesAsync(ct))
+ {
+ builder.WithSource(source);
+ }
+
+ builder.CaptureCurrentEnvironment();
+
+ return await _snapshotService.CreateSnapshotAsync(builder, ct);
+ }
+}
+
+// Extended result
+public sealed record PolicyEvaluationResult
+{
+ // Existing fields...
+ public string? KnowledgeSnapshotId { get; init; }
+ public DateTimeOffset? SnapshotCreatedAt { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `EvaluateWithSnapshotAsync` method added
+- [ ] Snapshot verification before evaluation
+- [ ] Evaluation bound to snapshot sources
+- [ ] `CaptureCurrentSnapshotAsync` for snapshot creation
+- [ ] Result includes snapshot reference
+- [ ] `InvalidSnapshot` failure reason added
+
+---
+
+### T7: Add Tests
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T6
+
+**Description**:
+Add comprehensive tests for snapshot determinism and integrity.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Tests/Snapshots/`
+
+**Test Cases**:
+```csharp
+public class SnapshotBuilderTests
+{
+ [Fact]
+ public void Build_ValidInputs_CreatesManifest()
+ {
+ var builder = new SnapshotBuilder(_hasher)
+ .WithEngine("test", "1.0", "abc123")
+ .WithPolicy("policy-1", "sha256:xxx")
+ .WithScoring("scoring-1", "sha256:yyy")
+ .WithAdvisoryFeed("nvd", "2025-12-21", "sha256:zzz");
+
+ var manifest = builder.Build();
+
+ manifest.SnapshotId.Should().StartWith("ksm:sha256:");
+ manifest.Sources.Should().HaveCount(1);
+ }
+
+ [Fact]
+ public void Build_MissingEngine_Throws()
+ {
+ var builder = new SnapshotBuilder(_hasher)
+ .WithPolicy("policy-1", "sha256:xxx")
+ .WithScoring("scoring-1", "sha256:yyy");
+
+ var act = () => builder.Build();
+
+ act.Should().Throw<InvalidOperationException>();
+ }
+}
+
+public class SnapshotIdGeneratorTests
+{
+ [Fact]
+ public void GenerateId_DeterministicForSameContent()
+ {
+ var manifest = CreateTestManifest();
+
+ var id1 = _generator.GenerateId(manifest);
+ var id2 = _generator.GenerateId(manifest);
+
+ id1.Should().Be(id2);
+ }
+
+ [Fact]
+ public void GenerateId_DifferentForDifferentContent()
+ {
+ var manifest1 = CreateTestManifest() with { CreatedAt = DateTimeOffset.UtcNow };
+ var manifest2 = CreateTestManifest() with { CreatedAt = DateTimeOffset.UtcNow.AddSeconds(1) };
+
+ var id1 = _generator.GenerateId(manifest1);
+ var id2 = _generator.GenerateId(manifest2);
+
+ id1.Should().NotBe(id2);
+ }
+
+ [Fact]
+ public void ValidateId_ValidManifest_ReturnsTrue()
+ {
+ var manifest = new SnapshotBuilder(_hasher)
+ .WithEngine("test", "1.0", "abc")
+ .WithPolicy("p", "sha256:x")
+ .WithScoring("s", "sha256:y")
+ .WithAdvisoryFeed("nvd", "2025", "sha256:z")
+ .Build();
+
+ _generator.ValidateId(manifest).Should().BeTrue();
+ }
+
+ [Fact]
+ public void ValidateId_TamperedManifest_ReturnsFalse()
+ {
+ var manifest = CreateTestManifest();
+ var tampered = manifest with { Policy = manifest.Policy with { Digest = "sha256:tampered" } };
+
+ _generator.ValidateId(tampered).Should().BeFalse();
+ }
+}
+
+public class SnapshotServiceTests
+{
+ [Fact]
+ public async Task CreateSnapshot_PersistsManifest()
+ {
+ var builder = CreateBuilder();
+
+ var manifest = await _service.CreateSnapshotAsync(builder);
+
+ var retrieved = await _service.GetSnapshotAsync(manifest.SnapshotId);
+ retrieved.Should().NotBeNull();
+ }
+
+ [Fact]
+ public async Task SealSnapshot_AddsSignature()
+ {
+ var manifest = await _service.CreateSnapshotAsync(CreateBuilder());
+
+ var sealedManifest = await _service.SealSnapshotAsync(manifest);
+
+ sealedManifest.Signature.Should().NotBeNullOrEmpty();
+ }
+
+ [Fact]
+ public async Task VerifySnapshot_ValidSealed_ReturnsSuccess()
+ {
+ var manifest = await _service.CreateSnapshotAsync(CreateBuilder());
+ var sealedManifest = await _service.SealSnapshotAsync(manifest);
+
+ var result = await _service.VerifySnapshotAsync(sealedManifest);
+
+ result.IsValid.Should().BeTrue();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Builder tests for valid/invalid inputs
+- [ ] ID generator determinism tests
+- [ ] ID validation tests (valid and tampered)
+- [ ] Service create/seal/verify tests
+- [ ] All 8+ tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define KnowledgeSnapshotManifest |
+| 2 | T2 | TODO | — | Policy Team | Define KnowledgeSourceDescriptor |
+| 3 | T3 | TODO | T1, T2 | Policy Team | Create SnapshotBuilder |
+| 4 | T4 | TODO | T3 | Policy Team | Implement content-addressed ID |
+| 5 | T5 | TODO | T3, T4 | Policy Team | Create SnapshotService |
+| 6 | T6 | TODO | T5 | Policy Team | Integrate with PolicyEvaluator |
+| 7 | T7 | TODO | T6 | Policy Team | Add tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Knowledge snapshots identified as requirement from Knowledge Snapshots advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Content-addressed ID | Decision | Policy Team | ksm:sha256:{hash} format ensures immutability |
+| Canonical JSON | Decision | Policy Team | Sorted keys, no whitespace for determinism |
+| Signature exclusion | Decision | Policy Team | ID computed without signature field |
+| Source ordering | Decision | Policy Team | Sources sorted by name for determinism |
+
+---
+
+## Success Criteria
+
+- [ ] All 7 tasks marked DONE
+- [ ] Snapshot IDs are content-addressed
+- [ ] Manifests are deterministically serializable
+- [ ] Sealing adds verifiable signatures
+- [ ] Policy evaluator integrates snapshots
+- [ ] 8+ snapshot tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0002_0002_replay_engine.md b/docs/implplan/SPRINT_4100_0002_0002_replay_engine.md
new file mode 100644
index 000000000..b878a83a5
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0002_0002_replay_engine.md
@@ -0,0 +1,1589 @@
+# Sprint 4100.0002.0002 · Replay Engine
+
+## Topic & Scope
+
+- Implement time-travel replay for policy evaluations
+- Enable re-evaluation with frozen knowledge inputs
+- Support determinism verification and audit
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy/Replay/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0002.0001 (Knowledge Snapshot Manifest) — MUST BE DONE
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4100.0001.0002, Sprint 4100.0003.0002
+
+## Documentation Prerequisites
+
+- Sprint 4100.0002.0001 completion (KnowledgeSnapshotManifest)
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Knowledge Snapshots and Time‑Travel Replay.md`
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Guidelines for Product and Development Managers - Signed, Replayable Risk Verdicts.md`
+
+---
+
+## Tasks
+
+### T1: Define ReplayRequest
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create the request model for replay operations.
+
+**Implementation Path**: `Replay/ReplayRequest.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Request to replay a policy evaluation with frozen inputs.
+///
+public sealed record ReplayRequest
+{
+ ///
+ /// The artifact to evaluate (same as original).
+ ///
+ public required string ArtifactDigest { get; init; }
+
+ ///
+ /// ID of the knowledge snapshot to use for replay.
+ ///
+ public required string SnapshotId { get; init; }
+
+ ///
+ /// Original verdict ID being replayed (for comparison).
+ ///
+ public string? OriginalVerdictId { get; init; }
+
+ ///
+ /// Replay options.
+ ///
+ public ReplayOptions Options { get; init; } = ReplayOptions.Default;
+}
+
+///
+/// Options controlling replay behavior.
+///
+public sealed record ReplayOptions
+{
+ ///
+ /// Whether to compare with original verdict.
+ ///
+ public bool CompareWithOriginal { get; init; } = true;
+
+ ///
+ /// Whether to allow network access for missing sources.
+ ///
+ public bool AllowNetworkFetch { get; init; } = false;
+
+ ///
+ /// Whether to generate detailed diff report.
+ ///
+ public bool GenerateDetailedReport { get; init; } = true;
+
+ ///
+ /// Tolerance for score differences (for floating point comparison).
+ ///
+ public decimal ScoreTolerance { get; init; } = 0.001m;
+
+ public static ReplayOptions Default { get; } = new();
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ReplayRequest.cs` file created in `Replay/` directory
+- [ ] Artifact digest and snapshot ID required
+- [ ] Original verdict ID optional for comparison
+- [ ] Options for controlling replay behavior
+- [ ] Default options defined
+
+---
+
+### T2: Define ReplayResult
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Create the result model for replay operations.
+
+**Implementation Path**: `Replay/ReplayResult.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Result of a replay operation.
+///
+public sealed record ReplayResult
+{
+ ///
+ /// Whether the replay matched the original verdict.
+ ///
+ public required ReplayMatchStatus MatchStatus { get; init; }
+
+ ///
+ /// The verdict produced by replay.
+ ///
+ public required PolicyEvaluationResult ReplayedVerdict { get; init; }
+
+ ///
+ /// The original verdict (if available for comparison).
+ ///
+ public PolicyEvaluationResult? OriginalVerdict { get; init; }
+
+ ///
+ /// Detailed delta report if differences found.
+ ///
+ public ReplayDeltaReport? DeltaReport { get; init; }
+
+ ///
+ /// Snapshot used for replay.
+ ///
+ public required string SnapshotId { get; init; }
+
+ ///
+ /// When replay was executed.
+ ///
+ public required DateTimeOffset ReplayedAt { get; init; }
+
+ ///
+ /// Duration of replay execution.
+ ///
+ public TimeSpan Duration { get; init; }
+}
+
+///
+/// Match status between replayed and original verdict.
+///
+public enum ReplayMatchStatus
+{
+ ///
+ /// Verdicts match exactly (deterministic).
+ ///
+ ExactMatch,
+
+ ///
+ /// Verdicts match within tolerance.
+ ///
+ MatchWithinTolerance,
+
+ ///
+ /// Verdicts differ (non-deterministic or inputs changed).
+ ///
+ Mismatch,
+
+ ///
+ /// Original verdict not available for comparison.
+ ///
+ NoComparison,
+
+ ///
+ /// Replay failed due to missing inputs.
+ ///
+ ReplayFailed
+}
+
+///
+/// Detailed report of differences between replayed and original.
+///
+public sealed record ReplayDeltaReport
+{
+ ///
+ /// Summary of the difference.
+ ///
+ public required string Summary { get; init; }
+
+ ///
+ /// Specific fields that differ.
+ ///
+ public IReadOnlyList<FieldDelta> FieldDeltas { get; init; } = [];
+
+ ///
+ /// Findings that differ.
+ ///
+ public IReadOnlyList<FindingDelta> FindingDeltas { get; init; } = [];
+
+ ///
+ /// Input sources that may have caused difference.
+ ///
+ public IReadOnlyList<string> SuspectedCauses { get; init; } = [];
+}
+
+public sealed record FieldDelta(
+ string FieldName,
+ string OriginalValue,
+ string ReplayedValue);
+
+public sealed record FindingDelta(
+ string FindingId,
+ DeltaType Type,
+ string? Description);
+
+public enum DeltaType
+{
+ Added,
+ Removed,
+ Modified
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ReplayResult.cs` file created
+- [ ] Match status enum with all states
+- [ ] Delta report with field and finding differences
+- [ ] Suspected causes for non-determinism
+- [ ] Duration tracking for performance
+
+---
+
+### T3: Create ReplayEngine Service
+
+**Assignee**: Policy Team
+**Story Points**: 5
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Implement the core replay engine that orchestrates frozen evaluation.
+
+**Implementation Path**: `Replay/ReplayEngine.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Engine for replaying policy evaluations with frozen inputs.
+///
+public sealed class ReplayEngine : IReplayEngine
+{
+ private readonly ISnapshotService _snapshotService;
+ private readonly IPolicyEvaluator _evaluator;
+ private readonly IVerdictStore _verdictStore;
+ private readonly IKnowledgeSourceResolver _sourceResolver;
+ private readonly ILogger<ReplayEngine> _logger;
+
+ public ReplayEngine(
+ ISnapshotService snapshotService,
+ IPolicyEvaluator evaluator,
+ IVerdictStore verdictStore,
+ IKnowledgeSourceResolver sourceResolver,
+ ILogger<ReplayEngine> logger)
+ {
+ _snapshotService = snapshotService;
+ _evaluator = evaluator;
+ _verdictStore = verdictStore;
+ _sourceResolver = sourceResolver;
+ _logger = logger;
+ }
+
+ ///
+ /// Replays a policy evaluation with frozen inputs from a snapshot.
+ ///
+ public async Task<ReplayResult> ReplayAsync(
+ ReplayRequest request,
+ CancellationToken ct = default)
+ {
+ var stopwatch = Stopwatch.StartNew();
+
+ _logger.LogInformation(
+ "Starting replay for artifact {Artifact} with snapshot {Snapshot}",
+ request.ArtifactDigest, request.SnapshotId);
+
+ // Step 1: Load and verify snapshot
+ var snapshot = await LoadAndVerifySnapshotAsync(request.SnapshotId, ct);
+ if (snapshot is null)
+ {
+ return CreateFailedResult(request, "Snapshot not found or invalid");
+ }
+
+ // Step 2: Resolve frozen inputs from snapshot
+ var frozenInputs = await ResolveFrozenInputsAsync(snapshot, request.Options, ct);
+ if (!frozenInputs.IsComplete)
+ {
+ return CreateFailedResult(request, $"Missing inputs: {string.Join(", ", frozenInputs.MissingSources)}");
+ }
+
+ // Step 3: Execute evaluation with frozen inputs
+ var replayedVerdict = await _evaluator.EvaluateWithFrozenInputsAsync(
+ request.ArtifactDigest,
+ frozenInputs,
+ ct);
+
+ // Step 4: Load original verdict for comparison (if requested)
+ PolicyEvaluationResult? originalVerdict = null;
+ if (request.OriginalVerdictId is not null && request.Options.CompareWithOriginal)
+ {
+ originalVerdict = await _verdictStore.GetAsync(request.OriginalVerdictId, ct);
+ }
+
+ // Step 5: Compare and generate result
+ var matchStatus = CompareVerdicts(replayedVerdict, originalVerdict, request.Options);
+ var deltaReport = matchStatus == ReplayMatchStatus.Mismatch && request.Options.GenerateDetailedReport
+ ? GenerateDeltaReport(replayedVerdict, originalVerdict!)
+ : null;
+
+ stopwatch.Stop();
+
+ return new ReplayResult
+ {
+ MatchStatus = matchStatus,
+ ReplayedVerdict = replayedVerdict,
+ OriginalVerdict = originalVerdict,
+ DeltaReport = deltaReport,
+ SnapshotId = request.SnapshotId,
+ ReplayedAt = DateTimeOffset.UtcNow,
+ Duration = stopwatch.Elapsed
+ };
+ }
+
+ private async Task<KnowledgeSnapshotManifest?> LoadAndVerifySnapshotAsync(
+ string snapshotId, CancellationToken ct)
+ {
+ var snapshot = await _snapshotService.GetSnapshotAsync(snapshotId, ct);
+ if (snapshot is null)
+ return null;
+
+ var verification = await _snapshotService.VerifySnapshotAsync(snapshot, ct);
+ if (!verification.IsValid)
+ {
+ _logger.LogWarning("Snapshot {SnapshotId} verification failed: {Error}",
+ snapshotId, verification.Error);
+ return null;
+ }
+
+ return snapshot;
+ }
+
+ private async Task<FrozenInputs> ResolveFrozenInputsAsync(
+ KnowledgeSnapshotManifest snapshot,
+ ReplayOptions options,
+ CancellationToken ct)
+ {
+ var inputs = new FrozenInputsBuilder();
+ var missingSources = new List<string>();
+
+ foreach (var source in snapshot.Sources)
+ {
+ var resolved = await _sourceResolver.ResolveAsync(source, options.AllowNetworkFetch, ct);
+ if (resolved is not null)
+ {
+ inputs.AddSource(source.Name, resolved);
+ }
+ else
+ {
+ missingSources.Add($"{source.Name}:{source.Digest}");
+ }
+ }
+
+ return inputs.Build(missingSources);
+ }
+
+ private ReplayMatchStatus CompareVerdicts(
+ PolicyEvaluationResult replayed,
+ PolicyEvaluationResult? original,
+ ReplayOptions options)
+ {
+ if (original is null)
+ return ReplayMatchStatus.NoComparison;
+
+ // Compare decision
+ if (replayed.Decision != original.Decision)
+ return ReplayMatchStatus.Mismatch;
+
+ // Compare score with tolerance
+ if (Math.Abs(replayed.Score - original.Score) > options.ScoreTolerance)
+ return ReplayMatchStatus.MatchWithinTolerance;
+
+ // Compare findings
+ if (!FindingsMatch(replayed.Findings, original.Findings))
+ return ReplayMatchStatus.Mismatch;
+
+ return ReplayMatchStatus.ExactMatch;
+ }
+
+ private bool FindingsMatch(
+ IReadOnlyList<PolicyFinding> replayed,
+ IReadOnlyList<PolicyFinding> original)
+ {
+ if (replayed.Count != original.Count)
+ return false;
+
+ var replayedIds = replayed.Select(f => f.Id).OrderBy(x => x).ToList();
+ var originalIds = original.Select(f => f.Id).OrderBy(x => x).ToList();
+
+ return replayedIds.SequenceEqual(originalIds);
+ }
+
+ private ReplayDeltaReport GenerateDeltaReport(
+ PolicyEvaluationResult replayed,
+ PolicyEvaluationResult original)
+ {
+ var fieldDeltas = new List<FieldDelta>();
+ var findingDeltas = new List<FindingDelta>();
+ var suspectedCauses = new List<string>();
+
+ // Compare scalar fields
+ if (replayed.Decision != original.Decision)
+ fieldDeltas.Add(new FieldDelta("Decision", original.Decision.ToString(), replayed.Decision.ToString()));
+
+ if (replayed.Score != original.Score)
+ fieldDeltas.Add(new FieldDelta("Score", original.Score.ToString(), replayed.Score.ToString()));
+
+ // Compare findings
+ var replayedIds = replayed.Findings.Select(f => f.Id).ToHashSet();
+ var originalIds = original.Findings.Select(f => f.Id).ToHashSet();
+
+ foreach (var added in replayedIds.Except(originalIds))
+ findingDeltas.Add(new FindingDelta(added, DeltaType.Added, null));
+
+ foreach (var removed in originalIds.Except(replayedIds))
+ findingDeltas.Add(new FindingDelta(removed, DeltaType.Removed, null));
+
+ // Infer suspected causes
+ if (findingDeltas.Count > 0)
+ suspectedCauses.Add("Advisory data differences");
+
+ return new ReplayDeltaReport
+ {
+ Summary = $"{fieldDeltas.Count} field(s) and {findingDeltas.Count} finding(s) differ",
+ FieldDeltas = fieldDeltas,
+ FindingDeltas = findingDeltas,
+ SuspectedCauses = suspectedCauses
+ };
+ }
+
+ private ReplayResult CreateFailedResult(ReplayRequest request, string error)
+ {
+ _logger.LogWarning("Replay failed for {Artifact}: {Error}",
+ request.ArtifactDigest, error);
+
+ return new ReplayResult
+ {
+ MatchStatus = ReplayMatchStatus.ReplayFailed,
+ ReplayedVerdict = PolicyEvaluationResult.Empty,
+ SnapshotId = request.SnapshotId,
+ ReplayedAt = DateTimeOffset.UtcNow,
+ DeltaReport = new ReplayDeltaReport
+ {
+ Summary = error,
+ SuspectedCauses = new[] { error }
+ }
+ };
+ }
+}
+
+public interface IReplayEngine
+{
+ Task<ReplayResult> ReplayAsync(ReplayRequest request, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ReplayEngine.cs` created in `Replay/`
+- [ ] Snapshot loading and verification
+- [ ] Frozen input resolution from snapshot sources
+- [ ] Evaluation with frozen inputs
+- [ ] Verdict comparison with configurable tolerance
+- [ ] Delta report generation for mismatches
+- [ ] Logging for observability
+
+---
+
+### T4: Implement Input Resolution
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3
+
+**Description**:
+Implement resolution of exact inputs from snapshot sources.
+
+**Implementation Path**: `Replay/KnowledgeSourceResolver.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Resolves knowledge sources from snapshot descriptors.
+///
+public sealed class KnowledgeSourceResolver : IKnowledgeSourceResolver
+{
+ private readonly ISnapshotStore _snapshotStore;
+ private readonly IAdvisoryFeedStore _feedStore;
+ private readonly IVexStore _vexStore;
+ private readonly IHttpClientFactory _httpClientFactory;
+ private readonly ILogger _logger;
+
+ public KnowledgeSourceResolver(
+ ISnapshotStore snapshotStore,
+ IAdvisoryFeedStore feedStore,
+ IVexStore vexStore,
+ IHttpClientFactory httpClientFactory,
+ ILogger logger)
+ {
+ _snapshotStore = snapshotStore;
+ _feedStore = feedStore;
+ _vexStore = vexStore;
+ _httpClientFactory = httpClientFactory;
+ _logger = logger;
+ }
+
+ /// <summary>
+ /// Resolves a knowledge source to its actual content.
+ /// </summary>
+ public async Task<ResolvedSource?> ResolveAsync(
+ KnowledgeSourceDescriptor descriptor,
+ bool allowNetworkFetch,
+ CancellationToken ct = default)
+ {
+ _logger.LogDebug("Resolving source {Name} ({Type})", descriptor.Name, descriptor.Type);
+
+ // Try bundled content first
+ if (descriptor.InclusionMode != SourceInclusionMode.Referenced &&
+ descriptor.BundlePath is not null)
+ {
+ var bundled = await ResolveBundledAsync(descriptor, ct);
+ if (bundled is not null)
+ return bundled;
+ }
+
+ // Try local store by digest
+ var local = await ResolveFromLocalStoreAsync(descriptor, ct);
+ if (local is not null)
+ return local;
+
+ // Try network fetch if allowed
+ if (allowNetworkFetch && descriptor.Origin is not null)
+ {
+ var fetched = await FetchFromOriginAsync(descriptor, ct);
+ if (fetched is not null)
+ return fetched;
+ }
+
+ _logger.LogWarning("Failed to resolve source {Name} with digest {Digest}",
+ descriptor.Name, descriptor.Digest);
+
+ return null;
+ }
+
+ private async Task<ResolvedSource?> ResolveBundledAsync(
+ KnowledgeSourceDescriptor descriptor,
+ CancellationToken ct)
+ {
+ try
+ {
+ var content = await _snapshotStore.GetBundledContentAsync(
+ descriptor.BundlePath!, ct);
+
+ if (content is null)
+ return null;
+
+ // Verify digest
+ var actualDigest = ComputeDigest(content);
+ if (actualDigest != descriptor.Digest)
+ {
+ _logger.LogWarning(
+ "Bundled source {Name} digest mismatch: expected {Expected}, got {Actual}",
+ descriptor.Name, descriptor.Digest, actualDigest);
+ return null;
+ }
+
+ return new ResolvedSource(
+ descriptor.Name,
+ descriptor.Type,
+ content,
+ SourceResolutionMethod.Bundled);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Failed to resolve bundled source {Name}", descriptor.Name);
+ return null;
+ }
+ }
+
+ private async Task<ResolvedSource?> ResolveFromLocalStoreAsync(
+ KnowledgeSourceDescriptor descriptor,
+ CancellationToken ct)
+ {
+ return descriptor.Type switch
+ {
+ "advisory-feed" => await ResolveFeedAsync(descriptor, ct),
+ "vex" => await ResolveVexAsync(descriptor, ct),
+ _ => null
+ };
+ }
+
+ private async Task<ResolvedSource?> ResolveFeedAsync(
+ KnowledgeSourceDescriptor descriptor,
+ CancellationToken ct)
+ {
+ var feed = await _feedStore.GetByDigestAsync(descriptor.Digest, ct);
+ if (feed is null)
+ return null;
+
+ return new ResolvedSource(
+ descriptor.Name,
+ descriptor.Type,
+ feed.Content,
+ SourceResolutionMethod.LocalStore);
+ }
+
+ private async Task<ResolvedSource?> ResolveVexAsync(
+ KnowledgeSourceDescriptor descriptor,
+ CancellationToken ct)
+ {
+ var vex = await _vexStore.GetByDigestAsync(descriptor.Digest, ct);
+ if (vex is null)
+ return null;
+
+ return new ResolvedSource(
+ descriptor.Name,
+ descriptor.Type,
+ vex.Content,
+ SourceResolutionMethod.LocalStore);
+ }
+
+ private async Task<ResolvedSource?> FetchFromOriginAsync(
+ KnowledgeSourceDescriptor descriptor,
+ CancellationToken ct)
+ {
+ try
+ {
+ var client = _httpClientFactory.CreateClient("replay");
+ var response = await client.GetAsync(descriptor.Origin, ct);
+ response.EnsureSuccessStatusCode();
+
+ var content = await response.Content.ReadAsByteArrayAsync(ct);
+
+ // Verify digest
+ var actualDigest = ComputeDigest(content);
+ if (actualDigest != descriptor.Digest)
+ {
+ _logger.LogWarning(
+ "Fetched source {Name} digest mismatch: expected {Expected}, got {Actual}",
+ descriptor.Name, descriptor.Digest, actualDigest);
+ return null;
+ }
+
+ return new ResolvedSource(
+ descriptor.Name,
+ descriptor.Type,
+ content,
+ SourceResolutionMethod.NetworkFetch);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Failed to fetch source {Name} from {Origin}",
+ descriptor.Name, descriptor.Origin);
+ return null;
+ }
+ }
+
+ private static string ComputeDigest(byte[] content)
+ {
+ var hash = SHA256.HashData(content);
+ return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
+ }
+}
+
+public sealed record ResolvedSource(
+ string Name,
+ string Type,
+ byte[] Content,
+ SourceResolutionMethod Method);
+
+public enum SourceResolutionMethod
+{
+ Bundled,
+ LocalStore,
+ NetworkFetch
+}
+
+public interface IKnowledgeSourceResolver
+{
+ Task<ResolvedSource?> ResolveAsync(
+ KnowledgeSourceDescriptor descriptor,
+ bool allowNetworkFetch,
+ CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `KnowledgeSourceResolver.cs` created
+- [ ] Resolution order: bundled → local store → network
+- [ ] Digest verification on all resolved content
+- [ ] Network fetch controlled by flag
+- [ ] Resolution method tracked for audit
+- [ ] Logging for observability
+
+---
+
+### T5: Implement Comparison Logic
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3
+
+**Description**:
+Implement detailed comparison logic to detect determinism violations.
+
+**Implementation Path**: `Replay/VerdictComparer.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Compares policy evaluation results for determinism verification.
+///
+public sealed class VerdictComparer : IVerdictComparer
+{
+ /// <summary>
+ /// Compares two verdicts and returns detailed comparison result.
+ /// </summary>
+ public VerdictComparisonResult Compare(
+ PolicyEvaluationResult replayed,
+ PolicyEvaluationResult original,
+ VerdictComparisonOptions options)
+ {
+ var differences = new List<VerdictDifference>();
+
+ // Compare decision
+ if (replayed.Decision != original.Decision)
+ {
+ differences.Add(new VerdictDifference(
+ "Decision",
+ DifferenceCategory.Critical,
+ original.Decision.ToString(),
+ replayed.Decision.ToString()));
+ }
+
+ // Compare score with tolerance
+ var scoreDiff = Math.Abs(replayed.Score - original.Score);
+ if (scoreDiff > options.ScoreTolerance)
+ {
+ differences.Add(new VerdictDifference(
+ "Score",
+ scoreDiff > options.CriticalScoreTolerance
+ ? DifferenceCategory.Critical
+ : DifferenceCategory.Minor,
+ original.Score.ToString("F4"),
+ replayed.Score.ToString("F4")));
+ }
+
+ // Compare findings
+ var findingDiffs = CompareFindingLists(replayed.Findings, original.Findings);
+ differences.AddRange(findingDiffs);
+
+ // Compare unknowns summary
+ if (replayed.UnknownBudgetStatus is not null && original.UnknownBudgetStatus is not null)
+ {
+ var unknownDiffs = CompareUnknownsSummary(
+ replayed.UnknownBudgetStatus,
+ original.UnknownBudgetStatus);
+ differences.AddRange(unknownDiffs);
+ }
+
+ // Determine overall match status
+ var matchStatus = DetermineMatchStatus(differences, options);
+
+ return new VerdictComparisonResult
+ {
+ MatchStatus = matchStatus,
+ Differences = differences,
+ IsDeterministic = matchStatus == ReplayMatchStatus.ExactMatch,
+ DeterminismConfidence = CalculateDeterminismConfidence(differences)
+ };
+ }
+
+ private IEnumerable<VerdictDifference> CompareFindingLists(
+ IReadOnlyList<PolicyFinding> replayed,
+ IReadOnlyList<PolicyFinding> original)
+ {
+ var replayedMap = replayed.ToDictionary(f => f.Id);
+ var originalMap = original.ToDictionary(f => f.Id);
+
+ // Findings added in replay
+ foreach (var id in replayedMap.Keys.Except(originalMap.Keys))
+ {
+ yield return new VerdictDifference(
+ $"Finding:{id}",
+ DifferenceCategory.Finding,
+ "absent",
+ "present");
+ }
+
+ // Findings removed in replay
+ foreach (var id in originalMap.Keys.Except(replayedMap.Keys))
+ {
+ yield return new VerdictDifference(
+ $"Finding:{id}",
+ DifferenceCategory.Finding,
+ "present",
+ "absent");
+ }
+
+ // Findings present in both - compare details
+ foreach (var id in replayedMap.Keys.Intersect(originalMap.Keys))
+ {
+ var replayedFinding = replayedMap[id];
+ var originalFinding = originalMap[id];
+
+ if (replayedFinding.Severity != originalFinding.Severity)
+ {
+ yield return new VerdictDifference(
+ $"Finding:{id}:Severity",
+ DifferenceCategory.Minor,
+ originalFinding.Severity.ToString(),
+ replayedFinding.Severity.ToString());
+ }
+ }
+ }
+
+ private IEnumerable<VerdictDifference> CompareUnknownsSummary(
+ BudgetStatusSummary replayed,
+ BudgetStatusSummary original)
+ {
+ if (replayed.TotalUnknowns != original.TotalUnknowns)
+ {
+ yield return new VerdictDifference(
+ "Unknowns:Total",
+ DifferenceCategory.Minor,
+ original.TotalUnknowns.ToString(),
+ replayed.TotalUnknowns.ToString());
+ }
+
+ if (replayed.IsExceeded != original.IsExceeded)
+ {
+ yield return new VerdictDifference(
+ "Unknowns:BudgetExceeded",
+ DifferenceCategory.Critical,
+ original.IsExceeded.ToString(),
+ replayed.IsExceeded.ToString());
+ }
+ }
+
+ private ReplayMatchStatus DetermineMatchStatus(
+ List<VerdictDifference> differences,
+ VerdictComparisonOptions options)
+ {
+ if (differences.Count == 0)
+ return ReplayMatchStatus.ExactMatch;
+
+ if (differences.Any(d => d.Category == DifferenceCategory.Critical))
+ return ReplayMatchStatus.Mismatch;
+
+ if (options.TreatMinorAsMatch &&
+ differences.All(d => d.Category == DifferenceCategory.Minor))
+ return ReplayMatchStatus.MatchWithinTolerance;
+
+ return ReplayMatchStatus.Mismatch;
+ }
+
+ private decimal CalculateDeterminismConfidence(List<VerdictDifference> differences)
+ {
+ if (differences.Count == 0)
+ return 1.0m;
+
+ var criticalCount = differences.Count(d => d.Category == DifferenceCategory.Critical);
+ var minorCount = differences.Count(d => d.Category == DifferenceCategory.Minor);
+
+ // Simple confidence calculation
+ var penalty = (criticalCount * 0.3m) + (minorCount * 0.05m);
+ return Math.Max(0, 1.0m - penalty);
+ }
+}
+
+public sealed record VerdictComparisonResult
+{
+ public required ReplayMatchStatus MatchStatus { get; init; }
+ public required IReadOnlyList<VerdictDifference> Differences { get; init; }
+ public required bool IsDeterministic { get; init; }
+ public required decimal DeterminismConfidence { get; init; }
+}
+
+public sealed record VerdictDifference(
+ string Field,
+ DifferenceCategory Category,
+ string OriginalValue,
+ string ReplayedValue);
+
+public enum DifferenceCategory
+{
+ Critical,
+ Minor,
+ Finding
+}
+
+public sealed record VerdictComparisonOptions
+{
+ public decimal ScoreTolerance { get; init; } = 0.001m;
+ public decimal CriticalScoreTolerance { get; init; } = 0.1m;
+ public bool TreatMinorAsMatch { get; init; } = true;
+
+ public static VerdictComparisonOptions Default { get; } = new();
+}
+
+public interface IVerdictComparer
+{
+ VerdictComparisonResult Compare(
+ PolicyEvaluationResult replayed,
+ PolicyEvaluationResult original,
+ VerdictComparisonOptions options);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `VerdictComparer.cs` created
+- [ ] Decision comparison (critical difference)
+- [ ] Score comparison with configurable tolerance
+- [ ] Finding list comparison (added/removed/modified)
+- [ ] Unknowns summary comparison
+- [ ] Determinism confidence calculation
+- [ ] Difference categorization (critical vs minor)
+
+---
+
+### T6: Create ReplayReport
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T5
+
+**Description**:
+Create detailed report format for replay results.
+
+**Implementation Path**: `Replay/ReplayReport.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Replay;
+
+///
+/// Detailed report of a replay operation.
+///
+public sealed record ReplayReport
+{
+ ///
+ /// Report ID for reference.
+ ///
+ public required string ReportId { get; init; }
+
+ ///
+ /// When the report was generated.
+ ///
+ public required DateTimeOffset GeneratedAt { get; init; }
+
+ ///
+ /// Artifact that was evaluated.
+ ///
+ public required string ArtifactDigest { get; init; }
+
+ ///
+ /// Snapshot used for replay.
+ ///
+ public required string SnapshotId { get; init; }
+
+ ///
+ /// Original verdict ID (if compared).
+ ///
+ public string? OriginalVerdictId { get; init; }
+
+ ///
+ /// Overall match status.
+ ///
+ public required ReplayMatchStatus MatchStatus { get; init; }
+
+ ///
+ /// Whether the evaluation is deterministic.
+ ///
+ public required bool IsDeterministic { get; init; }
+
+ ///
+ /// Confidence level in determinism (0.0 to 1.0).
+ ///
+ public required decimal DeterminismConfidence { get; init; }
+
+ ///
+ /// Summary of differences found.
+ ///
+ public required DifferenceSummary Differences { get; init; }
+
+ ///
+ /// Input resolution details.
+ ///
+ public required InputResolutionSummary InputResolution { get; init; }
+
+ ///
+ /// Execution timing.
+ ///
+ public required ExecutionTiming Timing { get; init; }
+
+ ///
+ /// Recommendations based on results.
+ ///
+ public IReadOnlyList<string> Recommendations { get; init; } = [];
+}
+
+public sealed record DifferenceSummary
+{
+ public int TotalDifferences { get; init; }
+ public int CriticalDifferences { get; init; }
+ public int MinorDifferences { get; init; }
+ public int FindingDifferences { get; init; }
+ public IReadOnlyList<string> TopDifferences { get; init; } = [];
+}
+
+public sealed record InputResolutionSummary
+{
+ public int TotalSources { get; init; }
+ public int ResolvedFromBundle { get; init; }
+ public int ResolvedFromLocalStore { get; init; }
+ public int ResolvedFromNetwork { get; init; }
+ public int FailedToResolve { get; init; }
+ public IReadOnlyList<string> MissingSources { get; init; } = [];
+}
+
+public sealed record ExecutionTiming
+{
+ public TimeSpan TotalDuration { get; init; }
+ public TimeSpan SnapshotLoadTime { get; init; }
+ public TimeSpan InputResolutionTime { get; init; }
+ public TimeSpan EvaluationTime { get; init; }
+ public TimeSpan ComparisonTime { get; init; }
+}
+
+///
+/// Builder for creating replay reports.
+///
+public sealed class ReplayReportBuilder
+{
+ private readonly ReplayResult _result;
+ private readonly ReplayRequest _request;
+ private readonly List _recommendations = [];
+
+ public ReplayReportBuilder(ReplayRequest request, ReplayResult result)
+ {
+ _request = request;
+ _result = result;
+ }
+
+ public ReplayReportBuilder AddRecommendation(string recommendation)
+ {
+ _recommendations.Add(recommendation);
+ return this;
+ }
+
+ public ReplayReportBuilder AddRecommendationsFromResult()
+ {
+ if (_result.MatchStatus == ReplayMatchStatus.Mismatch)
+ {
+ _recommendations.Add("Review the delta report to identify non-deterministic behavior");
+ _recommendations.Add("Check if advisory feeds have been updated since the original evaluation");
+ }
+
+ if (_result.MatchStatus == ReplayMatchStatus.ReplayFailed)
+ {
+ _recommendations.Add("Ensure the snapshot bundle is complete and accessible");
+ _recommendations.Add("Consider enabling network fetch for missing sources");
+ }
+
+ return this;
+ }
+
+ public ReplayReport Build()
+ {
+ return new ReplayReport
+ {
+ ReportId = $"rpt:{Guid.NewGuid():N}",
+ GeneratedAt = DateTimeOffset.UtcNow,
+ ArtifactDigest = _request.ArtifactDigest,
+ SnapshotId = _request.SnapshotId,
+ OriginalVerdictId = _request.OriginalVerdictId,
+ MatchStatus = _result.MatchStatus,
+ IsDeterministic = _result.MatchStatus == ReplayMatchStatus.ExactMatch,
+ DeterminismConfidence = CalculateConfidence(),
+ Differences = BuildDifferenceSummary(),
+ InputResolution = BuildInputResolutionSummary(),
+ Timing = BuildExecutionTiming(),
+ Recommendations = _recommendations
+ };
+ }
+
+ private decimal CalculateConfidence() =>
+ _result.MatchStatus switch
+ {
+ ReplayMatchStatus.ExactMatch => 1.0m,
+ ReplayMatchStatus.MatchWithinTolerance => 0.9m,
+ ReplayMatchStatus.Mismatch => 0.0m,
+ ReplayMatchStatus.NoComparison => 0.5m,
+ ReplayMatchStatus.ReplayFailed => 0.0m,
+ _ => 0.5m
+ };
+
+ private DifferenceSummary BuildDifferenceSummary()
+ {
+ if (_result.DeltaReport is null)
+ return new DifferenceSummary();
+
+ var fieldDeltas = _result.DeltaReport.FieldDeltas;
+ var findingDeltas = _result.DeltaReport.FindingDeltas;
+
+ return new DifferenceSummary
+ {
+ TotalDifferences = fieldDeltas.Count + findingDeltas.Count,
+ CriticalDifferences = fieldDeltas.Count(d => d.FieldName is "Decision" or "Score"),
+ MinorDifferences = fieldDeltas.Count(d => d.FieldName is not "Decision" and not "Score"),
+ FindingDifferences = findingDeltas.Count
+ };
+ }
+
+ private InputResolutionSummary BuildInputResolutionSummary()
+ {
+ // This would be populated from actual resolution data
+ return new InputResolutionSummary
+ {
+ TotalSources = 0,
+ ResolvedFromBundle = 0,
+ ResolvedFromLocalStore = 0,
+ ResolvedFromNetwork = 0,
+ FailedToResolve = 0,
+ MissingSources = _result.DeltaReport?.SuspectedCauses ?? []
+ };
+ }
+
+ private ExecutionTiming BuildExecutionTiming()
+ {
+ return new ExecutionTiming
+ {
+ TotalDuration = _result.Duration
+ };
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ReplayReport.cs` created
+- [ ] Comprehensive report structure with all metadata
+- [ ] Difference summary with categorization
+- [ ] Input resolution summary
+- [ ] Execution timing breakdown
+- [ ] Recommendation generation
+- [ ] Report builder for easy construction
+
+---
+
+### T7: Add CLI Command
+
+**Assignee**: CLI Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T3, T6
+
+**Description**:
+Add CLI command for replay operations.
+
+**Implementation Path**: `src/Cli/StellaOps.Cli/Commands/ReplayCommand.cs`
+
+**Implementation**:
+```csharp
+namespace StellaOps.Cli.Commands;
+
+[Command("replay", Description = "Replay a policy evaluation with frozen inputs")]
+public class ReplayCommand : ICommand
+{
+ [Option("--verdict", Description = "Original verdict ID to replay")]
+ public string? VerdictId { get; set; }
+
+ [Option("--snapshot", Description = "Snapshot ID to use")]
+ public string? SnapshotId { get; set; }
+
+ [Option("--artifact", Description = "Artifact digest to evaluate")]
+ public string? ArtifactDigest { get; set; }
+
+ [Option("--allow-network", Description = "Allow network fetch for missing sources")]
+ public bool AllowNetwork { get; set; }
+
+ [Option("--output", Description = "Output format: text, json, or report")]
+ public string Output { get; set; } = "text";
+
+ [Option("--report-file", Description = "Write detailed report to file")]
+ public string? ReportFile { get; set; }
+
+ private readonly IReplayEngine _replayEngine;
+ private readonly IVerdictStore _verdictStore;
+ private readonly IConsole _console;
+
+ public async Task ExecuteAsync(CancellationToken ct)
+ {
+ // Resolve inputs
+ var request = await BuildRequestAsync(ct);
+ if (request is null)
+ {
+ _console.WriteError("Could not determine replay parameters");
+ return;
+ }
+
+ // Execute replay
+ _console.WriteLine($"Replaying evaluation for {request.ArtifactDigest}...");
+ var result = await _replayEngine.ReplayAsync(request, ct);
+
+ // Generate report
+ var report = new ReplayReportBuilder(request, result)
+ .AddRecommendationsFromResult()
+ .Build();
+
+ // Output results
+ switch (Output.ToLowerInvariant())
+ {
+ case "json":
+ OutputJson(result);
+ break;
+ case "report":
+ OutputReport(report);
+ break;
+ default:
+ OutputText(result, report);
+ break;
+ }
+
+ // Write report file if requested
+ if (ReportFile is not null)
+ {
+ var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
+ await File.WriteAllTextAsync(ReportFile, json, ct);
+ _console.WriteLine($"Report written to {ReportFile}");
+ }
+ }
+
+ private async Task<ReplayRequest?> BuildRequestAsync(CancellationToken ct)
+ {
+ // If verdict ID provided, load verdict to get artifact and snapshot
+ if (VerdictId is not null)
+ {
+ var verdict = await _verdictStore.GetAsync(VerdictId, ct);
+ if (verdict is null)
+ {
+ _console.WriteError($"Verdict {VerdictId} not found");
+ return null;
+ }
+
+ return new ReplayRequest
+ {
+ ArtifactDigest = verdict.ArtifactDigest,
+ SnapshotId = verdict.KnowledgeSnapshotId ?? SnapshotId ?? throw new InvalidOperationException("Snapshot ID required"),
+ OriginalVerdictId = VerdictId,
+ Options = new ReplayOptions { AllowNetworkFetch = AllowNetwork }
+ };
+ }
+
+ // Otherwise, require explicit artifact and snapshot
+ if (ArtifactDigest is null || SnapshotId is null)
+ {
+ _console.WriteError("Either --verdict or both --artifact and --snapshot required");
+ return null;
+ }
+
+ return new ReplayRequest
+ {
+ ArtifactDigest = ArtifactDigest,
+ SnapshotId = SnapshotId,
+ Options = new ReplayOptions { AllowNetworkFetch = AllowNetwork }
+ };
+ }
+
+ private void OutputText(ReplayResult result, ReplayReport report)
+ {
+ var statusSymbol = result.MatchStatus switch
+ {
+ ReplayMatchStatus.ExactMatch => "[OK]",
+ ReplayMatchStatus.MatchWithinTolerance => "[~OK]",
+ ReplayMatchStatus.Mismatch => "[MISMATCH]",
+ ReplayMatchStatus.NoComparison => "[N/A]",
+ ReplayMatchStatus.ReplayFailed => "[FAILED]",
+ _ => "[?]"
+ };
+
+ _console.WriteLine($"Replay Status: {statusSymbol} {result.MatchStatus}");
+ _console.WriteLine($"Determinism Confidence: {report.DeterminismConfidence:P0}");
+ _console.WriteLine($"Duration: {result.Duration.TotalMilliseconds:F0}ms");
+
+ if (result.DeltaReport is not null && result.DeltaReport.FieldDeltas.Count > 0)
+ {
+ _console.WriteLine("\nDifferences:");
+ foreach (var delta in result.DeltaReport.FieldDeltas)
+ {
+ _console.WriteLine($" {delta.FieldName}: {delta.OriginalValue} → {delta.ReplayedValue}");
+ }
+ }
+
+ if (report.Recommendations.Count > 0)
+ {
+ _console.WriteLine("\nRecommendations:");
+ foreach (var rec in report.Recommendations)
+ {
+ _console.WriteLine($" - {rec}");
+ }
+ }
+ }
+
+ private void OutputJson(ReplayResult result)
+ {
+ var json = JsonSerializer.Serialize(result, new JsonSerializerOptions { WriteIndented = true });
+ _console.WriteLine(json);
+ }
+
+ private void OutputReport(ReplayReport report)
+ {
+ var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
+ _console.WriteLine(json);
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ReplayCommand.cs` created in CLI
+- [ ] `stella replay --verdict <verdict-id>` command works
+- [ ] `stella replay --artifact <digest> --snapshot <snapshot-id>` works
+- [ ] `--allow-network` flag for network fetch
+- [ ] Multiple output formats (text, json, report)
+- [ ] Report file export with `--report-file`
+
+---
+
+### T8: Add Golden Replay Tests
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T3, T5
+
+**Description**:
+Add golden tests verifying replay determinism.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Tests/Replay/`
+
+**Test Cases**:
+```csharp
+public class ReplayEngineTests
+{
+ [Fact]
+ public async Task Replay_SameInputs_ProducesExactMatch()
+ {
+ // Arrange
+ var snapshot = await CreateSnapshotAsync();
+ var originalVerdict = await _evaluator.EvaluateWithSnapshotAsync(CreateRequest(), snapshot);
+ await _verdictStore.SaveAsync(originalVerdict);
+
+ var request = new ReplayRequest
+ {
+ ArtifactDigest = originalVerdict.ArtifactDigest,
+ SnapshotId = snapshot.SnapshotId,
+ OriginalVerdictId = originalVerdict.VerdictId
+ };
+
+ // Act
+ var result = await _replayEngine.ReplayAsync(request);
+
+ // Assert
+ result.MatchStatus.Should().Be(ReplayMatchStatus.ExactMatch);
+ result.ReplayedVerdict.Decision.Should().Be(originalVerdict.Decision);
+ result.ReplayedVerdict.Score.Should().Be(originalVerdict.Score);
+ }
+
+ [Fact]
+ public async Task Replay_MissingSource_FailsGracefully()
+ {
+ // Arrange
+ var snapshot = CreateSnapshotWithMissingSource();
+ var request = new ReplayRequest
+ {
+ ArtifactDigest = "sha256:abc",
+ SnapshotId = snapshot.SnapshotId,
+ Options = new ReplayOptions { AllowNetworkFetch = false }
+ };
+
+ // Act
+ var result = await _replayEngine.ReplayAsync(request);
+
+ // Assert
+ result.MatchStatus.Should().Be(ReplayMatchStatus.ReplayFailed);
+ result.DeltaReport?.SuspectedCauses.Should().Contain("Missing inputs");
+ }
+
+ [Fact]
+ public async Task Replay_DifferentAdvisoryData_DetectsMismatch()
+ {
+ // Arrange
+ var originalSnapshot = await CreateSnapshotAsync();
+ var originalVerdict = await _evaluator.EvaluateWithSnapshotAsync(CreateRequest(), originalSnapshot);
+
+ // Create new snapshot with different advisory data
+ var newSnapshot = await CreateSnapshotWithUpdatedAdvisoriesAsync();
+
+ var request = new ReplayRequest
+ {
+ ArtifactDigest = originalVerdict.ArtifactDigest,
+ SnapshotId = newSnapshot.SnapshotId,
+ OriginalVerdictId = originalVerdict.VerdictId
+ };
+
+ // Act
+ var result = await _replayEngine.ReplayAsync(request);
+
+ // Assert
+ result.MatchStatus.Should().Be(ReplayMatchStatus.Mismatch);
+ result.DeltaReport.Should().NotBeNull();
+ }
+
+ [Fact]
+ public async Task Replay_100Iterations_AllDeterministic()
+ {
+ // Arrange
+ var snapshot = await CreateSnapshotAsync();
+ var request = new ReplayRequest
+ {
+ ArtifactDigest = "sha256:test",
+ SnapshotId = snapshot.SnapshotId
+ };
+
+ // Act
+ var results = new List<ReplayResult>();
+ for (int i = 0; i < 100; i++)
+ {
+ results.Add(await _replayEngine.ReplayAsync(request));
+ }
+
+ // Assert
+ var firstScore = results[0].ReplayedVerdict.Score;
+ var firstDecision = results[0].ReplayedVerdict.Decision;
+
+ results.Should().AllSatisfy(r =>
+ {
+ r.ReplayedVerdict.Score.Should().Be(firstScore);
+ r.ReplayedVerdict.Decision.Should().Be(firstDecision);
+ });
+ }
+}
+
+public class VerdictComparerTests
+{
+ [Fact]
+ public void Compare_IdenticalVerdicts_ReturnsExactMatch()
+ {
+ var verdict = CreateVerdict(decision: PolicyDecision.Pass, score: 85.5m);
+
+ var result = _comparer.Compare(verdict, verdict, VerdictComparisonOptions.Default);
+
+ result.MatchStatus.Should().Be(ReplayMatchStatus.ExactMatch);
+ result.IsDeterministic.Should().BeTrue();
+ result.DeterminismConfidence.Should().Be(1.0m);
+ }
+
+ [Fact]
+ public void Compare_DifferentDecisions_ReturnsMismatch()
+ {
+ var original = CreateVerdict(decision: PolicyDecision.Pass);
+ var replayed = CreateVerdict(decision: PolicyDecision.Fail);
+
+ var result = _comparer.Compare(replayed, original, VerdictComparisonOptions.Default);
+
+ result.MatchStatus.Should().Be(ReplayMatchStatus.Mismatch);
+ result.Differences.Should().Contain(d => d.Field == "Decision");
+ }
+
+ [Fact]
+ public void Compare_ScoreWithinTolerance_ReturnsMatch()
+ {
+ var original = CreateVerdict(score: 85.5000m);
+ var replayed = CreateVerdict(score: 85.5005m);
+
+ var result = _comparer.Compare(replayed, original,
+ new VerdictComparisonOptions { ScoreTolerance = 0.001m });
+
+ result.MatchStatus.Should().Be(ReplayMatchStatus.MatchWithinTolerance);
+ }
+
+ [Fact]
+ public void Compare_DifferentFindings_DetectsChanges()
+ {
+ var original = CreateVerdictWithFindings("CVE-2024-001", "CVE-2024-002");
+ var replayed = CreateVerdictWithFindings("CVE-2024-001", "CVE-2024-003");
+
+ var result = _comparer.Compare(replayed, original, VerdictComparisonOptions.Default);
+
+ result.MatchStatus.Should().Be(ReplayMatchStatus.Mismatch);
+ result.Differences.Should().Contain(d => d.Field == "Finding:CVE-2024-002");
+ result.Differences.Should().Contain(d => d.Field == "Finding:CVE-2024-003");
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Test for exact match with same inputs
+- [ ] Test for failure with missing sources
+- [ ] Test for mismatch detection with different advisories
+- [ ] Stress test: 100 iterations all deterministic
+- [ ] Verdict comparer tests for all cases
+- [ ] All 10+ golden tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define ReplayRequest |
+| 2 | T2 | TODO | T1 | Policy Team | Define ReplayResult |
+| 3 | T3 | TODO | T1, T2 | Policy Team | Create ReplayEngine service |
+| 4 | T4 | TODO | T3 | Policy Team | Implement input resolution |
+| 5 | T5 | TODO | T3 | Policy Team | Implement comparison logic |
+| 6 | T6 | TODO | T5 | Policy Team | Create ReplayReport |
+| 7 | T7 | TODO | T3, T6 | CLI Team | Add CLI command |
+| 8 | T8 | TODO | T3, T5 | Policy Team | Add golden replay tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Replay Engine identified as requirement from Knowledge Snapshots advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Score tolerance | Decision | Policy Team | 0.001 default allows for floating point variance |
+| Network fetch default | Decision | Policy Team | Disabled by default for air-gap safety |
+| Determinism confidence | Decision | Policy Team | Simple penalty-based calculation; tune with data |
+| Source resolution order | Decision | Policy Team | Bundled → local → network for performance/offline |
+
+---
+
+## Success Criteria
+
+- [ ] All 8 tasks marked DONE
+- [ ] Replay produces exact match for same inputs
+- [ ] Missing sources handled gracefully
+- [ ] Detailed delta reports generated
+- [ ] CLI command works with --verdict and --snapshot
+- [ ] 10+ golden replay tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0002_0003_snapshot_export_import.md b/docs/implplan/SPRINT_4100_0002_0003_snapshot_export_import.md
new file mode 100644
index 000000000..a077d4baf
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0002_0003_snapshot_export_import.md
@@ -0,0 +1,1180 @@
+# Sprint 4100.0002.0003 · Snapshot Export/Import
+
+## Topic & Scope
+
+- Enable portable snapshot bundles for air-gapped replay
+- Implement export with selectable inclusion levels
+- Implement import with integrity verification
+
+**Working directory:** `src/ExportCenter/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0002.0001 (Knowledge Snapshot Manifest) — MUST BE DONE
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4100.0001.0003, Sprint 4100.0004.0001
+
+## Documentation Prerequisites
+
+- Sprint 4100.0002.0001 completion (KnowledgeSnapshotManifest, KnowledgeSourceDescriptor)
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Knowledge Snapshots and Time‑Travel Replay.md`
+- `docs/24_OFFLINE_KIT.md`
+
+---
+
+## Tasks
+
+### T1: Define SnapshotBundle Format
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Define the ZIP bundle structure for portable snapshots.
+
+**Implementation Path**: `StellaOps.ExportCenter/Snapshots/SnapshotBundle.cs` (new file)
+
+**Bundle Structure**:
+```
+snapshot-{id}.zip
+├── manifest.json # KnowledgeSnapshotManifest
+├── manifest.dsse.json # DSSE-signed envelope of manifest
+├── sources/
+│ ├── nvd-2025-12-21.jsonl.gz
+│ ├── osv-2025-12-21.jsonl.gz
+│ ├── vex-sha256-abc.json
+│ └── ...
+├── policy/
+│ └── policy-sha256-xyz.json
+├── scoring/
+│ └── scoring-sha256-def.json
+├── trust/
+│ └── trust-sha256-ghi.pem
+└── META/
+ ├── BUNDLE_INFO.json # Bundle metadata
+ └── CHECKSUMS.sha256 # All file checksums
+```
+
+**Model Definition**:
+```csharp
+namespace StellaOps.ExportCenter.Snapshots;
+
+/// <summary>
+/// Represents a portable snapshot bundle.
+/// </summary>
+public sealed record SnapshotBundle
+{
+ /// <summary>
+ /// The snapshot manifest.
+ /// </summary>
+ public required KnowledgeSnapshotManifest Manifest { get; init; }
+
+ /// <summary>
+ /// Signed envelope of the manifest (if sealed).
+ /// </summary>
+ public string? SignedEnvelope { get; init; }
+
+ /// <summary>
+ /// Bundle metadata.
+ /// </summary>
+ public required BundleInfo Info { get; init; }
+
+ /// <summary>
+ /// Source files included in the bundle.
+ /// </summary>
+ public required IReadOnlyList<BundledFile> Sources { get; init; }
+
+ /// <summary>
+ /// Policy bundle file.
+ /// </summary>
+ public BundledFile? Policy { get; init; }
+
+ /// <summary>
+ /// Scoring rules file.
+ /// </summary>
+ public BundledFile? Scoring { get; init; }
+
+ /// <summary>
+ /// Trust bundle file.
+ /// </summary>
+ public BundledFile? Trust { get; init; }
+}
+
+/// <summary>
+/// Metadata about the bundle.
+/// </summary>
+public sealed record BundleInfo
+{
+ public required string BundleId { get; init; }
+ public required DateTimeOffset CreatedAt { get; init; }
+ public required string CreatedBy { get; init; }
+ public required SnapshotInclusionLevel InclusionLevel { get; init; }
+ public required long TotalSizeBytes { get; init; }
+ public required int FileCount { get; init; }
+ public string? Description { get; init; }
+}
+
+/// <summary>
+/// A file included in the bundle.
+/// </summary>
+public sealed record BundledFile(
+ string Path,
+ string Digest,
+ long SizeBytes,
+ bool IsCompressed);
+
+/// <summary>
+/// Level of content inclusion in the bundle.
+/// </summary>
+public enum SnapshotInclusionLevel
+{
+ /// <summary>
+ /// Only manifest with content digests (requires network for replay).
+ /// </summary>
+ ReferenceOnly,
+
+ /// <summary>
+ /// Manifest plus essential sources for offline replay.
+ /// </summary>
+ Portable,
+
+ /// <summary>
+ /// Full bundle with all sources, sealed and signed.
+ /// </summary>
+ Sealed
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SnapshotBundle.cs` created with all models
+- [ ] ZIP structure documented
+- [ ] Three inclusion levels defined
+- [ ] Checksums file format specified
+- [ ] All paths are relative within bundle
+
+---
+
+### T2: Implement ExportSnapshotService
+
+**Assignee**: ExportCenter Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement service to create portable snapshot bundles.
+
+**Implementation Path**: `StellaOps.ExportCenter/Snapshots/ExportSnapshotService.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.Snapshots;
+
+/// <summary>
+/// Service for exporting snapshots to portable bundles.
+/// </summary>
+public sealed class ExportSnapshotService : IExportSnapshotService
+{
+ private readonly ISnapshotService _snapshotService;
+ private readonly IKnowledgeSourceResolver _sourceResolver;
+ private readonly ISigner _signer;
+ private readonly ILogger<ExportSnapshotService> _logger;
+
+ public async Task<ExportResult> ExportAsync(
+ string snapshotId,
+ ExportOptions options,
+ CancellationToken ct = default)
+ {
+ _logger.LogInformation("Exporting snapshot {SnapshotId} with level {Level}",
+ snapshotId, options.InclusionLevel);
+
+ // Load snapshot
+ var snapshot = await _snapshotService.GetSnapshotAsync(snapshotId, ct);
+ if (snapshot is null)
+ return ExportResult.Fail($"Snapshot {snapshotId} not found");
+
+ // Create temp directory for bundle assembly
+ var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-export-{Guid.NewGuid():N}");
+ Directory.CreateDirectory(tempDir);
+
+ try
+ {
+ // Write manifest
+ await WriteManifestAsync(tempDir, snapshot, ct);
+
+ // Bundle sources based on inclusion level
+ var bundledFiles = new List<BundledFile>();
+ if (options.InclusionLevel != SnapshotInclusionLevel.ReferenceOnly)
+ {
+ bundledFiles = await BundleSourcesAsync(tempDir, snapshot, options, ct);
+ }
+
+ // Bundle policy and scoring
+ if (options.IncludePolicy)
+ {
+ var policyFile = await BundlePolicyAsync(tempDir, snapshot.Policy, ct);
+ if (policyFile is not null)
+ bundledFiles.Add(policyFile);
+ }
+
+ // Write checksums
+ await WriteChecksumsAsync(tempDir, bundledFiles, ct);
+
+ // Create bundle info
+ var bundleInfo = new BundleInfo
+ {
+ BundleId = $"bundle:{Guid.NewGuid():N}",
+ CreatedAt = DateTimeOffset.UtcNow,
+ CreatedBy = options.CreatedBy ?? "StellaOps",
+ InclusionLevel = options.InclusionLevel,
+ TotalSizeBytes = bundledFiles.Sum(f => f.SizeBytes),
+ FileCount = bundledFiles.Count,
+ Description = options.Description
+ };
+
+ await WriteBundleInfoAsync(tempDir, bundleInfo, ct);
+
+ // Create ZIP
+ var zipPath = options.OutputPath ?? Path.Combine(
+ Path.GetTempPath(),
+ $"snapshot-{snapshot.SnapshotId.Split(':').Last()[..12]}.zip");
+
+ ZipFile.CreateFromDirectory(tempDir, zipPath, CompressionLevel.Optimal, false);
+
+ _logger.LogInformation("Exported snapshot to {ZipPath}", zipPath);
+
+ return ExportResult.Success(zipPath, bundleInfo);
+ }
+ finally
+ {
+ // Cleanup temp directory
+ if (Directory.Exists(tempDir))
+ Directory.Delete(tempDir, true);
+ }
+ }
+
+ private async Task WriteManifestAsync(
+ string tempDir, KnowledgeSnapshotManifest manifest, CancellationToken ct)
+ {
+ var manifestPath = Path.Combine(tempDir, "manifest.json");
+ var json = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = true });
+ await File.WriteAllTextAsync(manifestPath, json, ct);
+
+ // Write signed envelope if signature present
+ if (manifest.Signature is not null)
+ {
+ var envelopePath = Path.Combine(tempDir, "manifest.dsse.json");
+ var envelope = CreateDsseEnvelope(manifest);
+ await File.WriteAllTextAsync(envelopePath, envelope, ct);
+ }
+ }
+
+ private async Task<List<BundledFile>> BundleSourcesAsync(
+ string tempDir, KnowledgeSnapshotManifest manifest, ExportOptions options, CancellationToken ct)
+ {
+ var sourcesDir = Path.Combine(tempDir, "sources");
+ Directory.CreateDirectory(sourcesDir);
+
+ var bundledFiles = new List();
+
+ foreach (var source in manifest.Sources)
+ {
+ // Resolve source content
+ var resolved = await _sourceResolver.ResolveAsync(source, true, ct);
+ if (resolved is null)
+ {
+ _logger.LogWarning("Could not resolve source {Name} for bundling", source.Name);
+ continue;
+ }
+
+ // Determine file path
+ var fileName = $"{source.Name}-{source.Epoch}.{GetExtension(source.Type)}";
+ var filePath = Path.Combine(sourcesDir, fileName);
+
+ // Compress if option enabled
+ if (options.CompressSources)
+ {
+ filePath += ".gz";
+ await using var fs = File.Create(filePath);
+ await using var gz = new GZipStream(fs, CompressionLevel.Optimal);
+ await gz.WriteAsync(resolved.Content, ct);
+ }
+ else
+ {
+ await File.WriteAllBytesAsync(filePath, resolved.Content, ct);
+ }
+
+ bundledFiles.Add(new BundledFile(
+ Path: $"sources/{Path.GetFileName(filePath)}",
+ Digest: source.Digest,
+ SizeBytes: new FileInfo(filePath).Length,
+ IsCompressed: options.CompressSources));
+ }
+
+ return bundledFiles;
+ }
+
+ private async Task WriteChecksumsAsync(
+ string tempDir, List<BundledFile> files, CancellationToken ct)
+ {
+ var metaDir = Path.Combine(tempDir, "META");
+ Directory.CreateDirectory(metaDir);
+
+ var checksums = string.Join("\n", files.Select(f => $"{f.Digest} {f.Path}"));
+ await File.WriteAllTextAsync(Path.Combine(metaDir, "CHECKSUMS.sha256"), checksums, ct);
+ }
+
+ private async Task WriteBundleInfoAsync(
+ string tempDir, BundleInfo info, CancellationToken ct)
+ {
+ var metaDir = Path.Combine(tempDir, "META");
+ Directory.CreateDirectory(metaDir);
+
+ var json = JsonSerializer.Serialize(info, new JsonSerializerOptions { WriteIndented = true });
+ await File.WriteAllTextAsync(Path.Combine(metaDir, "BUNDLE_INFO.json"), json, ct);
+ }
+
+ private static string GetExtension(string sourceType) =>
+ sourceType switch
+ {
+ "advisory-feed" => "jsonl",
+ "vex" => "json",
+ "sbom" => "json",
+ _ => "bin"
+ };
+}
+
+public sealed record ExportOptions
+{
+ public SnapshotInclusionLevel InclusionLevel { get; init; } = SnapshotInclusionLevel.Portable;
+ public bool CompressSources { get; init; } = true;
+ public bool IncludePolicy { get; init; } = true;
+ public bool IncludeScoring { get; init; } = true;
+ public bool IncludeTrust { get; init; } = true;
+ public string? OutputPath { get; init; }
+ public string? CreatedBy { get; init; }
+ public string? Description { get; init; }
+}
+
+public sealed record ExportResult
+{
+ public bool IsSuccess { get; init; }
+ public string? FilePath { get; init; }
+ public BundleInfo? BundleInfo { get; init; }
+ public string? Error { get; init; }
+
+ public static ExportResult Success(string filePath, BundleInfo info) =>
+ new() { IsSuccess = true, FilePath = filePath, BundleInfo = info };
+
+ public static ExportResult Fail(string error) =>
+ new() { IsSuccess = false, Error = error };
+}
+
+public interface IExportSnapshotService
+{
+ Task ExportAsync(string snapshotId, ExportOptions options, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ExportSnapshotService.cs` created
+- [ ] Manifest and signed envelope written
+- [ ] Sources bundled with optional compression
+- [ ] Checksums file generated
+- [ ] Bundle info metadata written
+- [ ] ZIP creation with cleanup
+
+---
+
+### T3: Implement ImportSnapshotService
+
+**Assignee**: ExportCenter Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement service to import snapshot bundles with integrity verification.
+
+**Implementation Path**: `StellaOps.ExportCenter/Snapshots/ImportSnapshotService.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.Snapshots;
+
+/// <summary>
+/// Service for importing snapshot bundles.
+/// </summary>
+public sealed class ImportSnapshotService : IImportSnapshotService
+{
+ private readonly ISnapshotService _snapshotService;
+ private readonly ISnapshotStore _snapshotStore;
+ private readonly IKnowledgeSourceStore _sourceStore;
+ private readonly ILogger<ImportSnapshotService> _logger;
+
+ public async Task<ImportResult> ImportAsync(
+ string bundlePath,
+ ImportOptions options,
+ CancellationToken ct = default)
+ {
+ _logger.LogInformation("Importing snapshot bundle from {Path}", bundlePath);
+
+ // Validate bundle exists
+ if (!File.Exists(bundlePath))
+ return ImportResult.Fail($"Bundle not found: {bundlePath}");
+
+ // Extract to temp directory
+ var tempDir = Path.Combine(Path.GetTempPath(), $"snapshot-import-{Guid.NewGuid():N}");
+
+ try
+ {
+ ZipFile.ExtractToDirectory(bundlePath, tempDir);
+
+ // Verify checksums first
+ if (options.VerifyChecksums)
+ {
+ var checksumResult = await VerifyChecksumsAsync(tempDir, ct);
+ if (!checksumResult.IsValid)
+ {
+ return ImportResult.Fail($"Checksum verification failed: {checksumResult.Error}");
+ }
+ }
+
+ // Load manifest
+ var manifestPath = Path.Combine(tempDir, "manifest.json");
+ if (!File.Exists(manifestPath))
+ return ImportResult.Fail("Bundle missing manifest.json");
+
+ var manifestJson = await File.ReadAllTextAsync(manifestPath, ct);
+ var manifest = JsonSerializer.Deserialize<KnowledgeSnapshotManifest>(manifestJson)
+ ?? throw new InvalidOperationException("Failed to parse manifest");
+
+ // Verify manifest signature if sealed
+ if (options.VerifySignature)
+ {
+ var envelopePath = Path.Combine(tempDir, "manifest.dsse.json");
+ if (File.Exists(envelopePath))
+ {
+ var verification = await VerifySignatureAsync(envelopePath, ct);
+ if (!verification.IsValid)
+ {
+ return ImportResult.Fail($"Signature verification failed: {verification.Error}");
+ }
+ }
+ }
+
+ // Verify content-addressed ID
+ var idVerification = await _snapshotService.VerifySnapshotAsync(manifest, ct);
+ if (!idVerification.IsValid)
+ {
+ return ImportResult.Fail($"Manifest ID verification failed: {idVerification.Error}");
+ }
+
+ // Check for conflicts
+ var existing = await _snapshotStore.GetAsync(manifest.SnapshotId, ct);
+ if (existing is not null && !options.OverwriteExisting)
+ {
+ return ImportResult.Fail($"Snapshot {manifest.SnapshotId} already exists");
+ }
+
+ // Import sources
+ var importedSources = 0;
+ var sourcesDir = Path.Combine(tempDir, "sources");
+ if (Directory.Exists(sourcesDir))
+ {
+ foreach (var sourceFile in Directory.GetFiles(sourcesDir))
+ {
+ await ImportSourceFileAsync(sourceFile, manifest, ct);
+ importedSources++;
+ }
+ }
+
+ // Save manifest
+ await _snapshotStore.SaveAsync(manifest, ct);
+
+ _logger.LogInformation(
+ "Imported snapshot {SnapshotId} with {SourceCount} sources",
+ manifest.SnapshotId, importedSources);
+
+ return ImportResult.Success(manifest, importedSources);
+ }
+ finally
+ {
+ // Cleanup temp directory
+ if (Directory.Exists(tempDir))
+ Directory.Delete(tempDir, true);
+ }
+ }
+
+ private async Task<VerificationResult> VerifyChecksumsAsync(string tempDir, CancellationToken ct)
+ {
+ var checksumsPath = Path.Combine(tempDir, "META", "CHECKSUMS.sha256");
+ if (!File.Exists(checksumsPath))
+ return VerificationResult.Valid();
+
+ var lines = await File.ReadAllLinesAsync(checksumsPath, ct);
+ foreach (var line in lines)
+ {
+ var parts = line.Split(" ", 2);
+ if (parts.Length != 2) continue;
+
+ var expectedDigest = parts[0];
+ var filePath = Path.Combine(tempDir, parts[1]);
+
+ if (!File.Exists(filePath))
+ {
+ return VerificationResult.Invalid($"Missing file: {parts[1]}");
+ }
+
+ var actualDigest = await ComputeFileDigestAsync(filePath, ct);
+ if (actualDigest != expectedDigest)
+ {
+ return VerificationResult.Invalid($"Digest mismatch for {parts[1]}");
+ }
+ }
+
+ return VerificationResult.Valid();
+ }
+
+ private async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
+ {
+ await using var fs = File.OpenRead(filePath);
+ var hash = await SHA256.HashDataAsync(fs, ct);
+ return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
+ }
+
+ private async Task<VerificationResult> VerifySignatureAsync(string envelopePath, CancellationToken ct)
+ {
+ // Delegate to signer for DSSE verification
+ var envelope = await File.ReadAllTextAsync(envelopePath, ct);
+ // ... signature verification logic
+ return VerificationResult.Valid();
+ }
+
+ private async Task ImportSourceFileAsync(
+ string filePath, KnowledgeSnapshotManifest manifest, CancellationToken ct)
+ {
+ var fileName = Path.GetFileName(filePath);
+
+ // Decompress if needed
+ byte[] content;
+ if (filePath.EndsWith(".gz"))
+ {
+ await using var fs = File.OpenRead(filePath);
+ await using var gz = new GZipStream(fs, CompressionMode.Decompress);
+ using var ms = new MemoryStream();
+ await gz.CopyToAsync(ms, ct);
+ content = ms.ToArray();
+ }
+ else
+ {
+ content = await File.ReadAllBytesAsync(filePath, ct);
+ }
+
+ // Find matching source descriptor
+ var digest = ComputeDigest(content);
+ var sourceDescriptor = manifest.Sources.FirstOrDefault(s => s.Digest == digest);
+
+ if (sourceDescriptor is not null)
+ {
+ await _sourceStore.StoreAsync(sourceDescriptor, content, ct);
+ }
+ }
+
+ private static string ComputeDigest(byte[] content)
+ {
+ var hash = SHA256.HashData(content);
+ return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
+ }
+}
+
+public sealed record ImportOptions
+{
+ public bool VerifyChecksums { get; init; } = true;
+ public bool VerifySignature { get; init; } = true;
+ public bool OverwriteExisting { get; init; } = false;
+}
+
+public sealed record ImportResult
+{
+ public bool IsSuccess { get; init; }
+ public KnowledgeSnapshotManifest? Manifest { get; init; }
+ public int ImportedSourceCount { get; init; }
+ public string? Error { get; init; }
+
+ public static ImportResult Success(KnowledgeSnapshotManifest manifest, int sourceCount) =>
+ new() { IsSuccess = true, Manifest = manifest, ImportedSourceCount = sourceCount };
+
+ public static ImportResult Fail(string error) =>
+ new() { IsSuccess = false, Error = error };
+}
+
+public sealed record VerificationResult(bool IsValid, string? Error)
+{
+ public static VerificationResult Valid() => new(true, null);
+ public static VerificationResult Invalid(string error) => new(false, error);
+}
+
+public interface IImportSnapshotService
+{
+ Task ImportAsync(string bundlePath, ImportOptions options, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `ImportSnapshotService.cs` created
+- [ ] ZIP extraction to temp directory
+- [ ] Checksum verification of all files
+- [ ] Signature verification for sealed bundles
+- [ ] Content-addressed ID verification
+- [ ] Conflict detection with overwrite option
+- [ ] Source files imported and stored
+- [ ] Cleanup on completion/failure
+
+---
+
+### T4: Add Snapshot Levels
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement behavior differences for the three inclusion levels.
+
+**Implementation Path**: `StellaOps.ExportCenter/Snapshots/SnapshotLevelHandler.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.Snapshots;
+
+/// <summary>
+/// Handles snapshot level-specific behavior.
+/// </summary>
+public sealed class SnapshotLevelHandler
+{
+ /// <summary>
+ /// Gets the default export options for a given inclusion level.
+ /// </summary>
+ public ExportOptions GetDefaultOptions(SnapshotInclusionLevel level)
+ {
+ return level switch
+ {
+ SnapshotInclusionLevel.ReferenceOnly => new ExportOptions
+ {
+ InclusionLevel = level,
+ CompressSources = false,
+ IncludePolicy = false,
+ IncludeScoring = false,
+ IncludeTrust = false
+ },
+
+ SnapshotInclusionLevel.Portable => new ExportOptions
+ {
+ InclusionLevel = level,
+ CompressSources = true,
+ IncludePolicy = true,
+ IncludeScoring = true,
+ IncludeTrust = false
+ },
+
+ SnapshotInclusionLevel.Sealed => new ExportOptions
+ {
+ InclusionLevel = level,
+ CompressSources = true,
+ IncludePolicy = true,
+ IncludeScoring = true,
+ IncludeTrust = true
+ },
+
+ _ => throw new ArgumentOutOfRangeException(nameof(level))
+ };
+ }
+
+ /// <summary>
+ /// Validates that a snapshot can be exported at the requested level.
+ /// </summary>
+ public ValidationResult ValidateForExport(
+ KnowledgeSnapshotManifest manifest,
+ SnapshotInclusionLevel level)
+ {
+ var issues = new List<string>();
+
+ // Sealed level requires signature
+ if (level == SnapshotInclusionLevel.Sealed && manifest.Signature is null)
+ {
+ issues.Add("Sealed export requires signed manifest. Seal the snapshot first.");
+ }
+
+ // Portable and Sealed require bundled sources
+ if (level != SnapshotInclusionLevel.ReferenceOnly)
+ {
+ var referencedOnly = manifest.Sources
+ .Where(s => s.InclusionMode == SourceInclusionMode.Referenced)
+ .ToList();
+
+ if (referencedOnly.Count > 0)
+ {
+ issues.Add($"{referencedOnly.Count} sources are reference-only and cannot be bundled without network access");
+ }
+ }
+
+ return issues.Count == 0
+ ? ValidationResult.Valid()
+ : ValidationResult.Invalid(issues);
+ }
+
+ /// <summary>
+ /// Gets the minimum requirements for replay at each level.
+ /// </summary>
+ public ReplayRequirements GetReplayRequirements(SnapshotInclusionLevel level)
+ {
+ return level switch
+ {
+ SnapshotInclusionLevel.ReferenceOnly => new ReplayRequirements
+ {
+ RequiresNetwork = true,
+ RequiresLocalStore = true,
+ RequiresTrustBundle = false,
+ Description = "Requires network access to fetch sources by digest"
+ },
+
+ SnapshotInclusionLevel.Portable => new ReplayRequirements
+ {
+ RequiresNetwork = false,
+ RequiresLocalStore = false,
+ RequiresTrustBundle = false,
+ Description = "Fully offline replay possible"
+ },
+
+ SnapshotInclusionLevel.Sealed => new ReplayRequirements
+ {
+ RequiresNetwork = false,
+ RequiresLocalStore = false,
+ RequiresTrustBundle = true,
+ Description = "Fully offline replay with cryptographic verification"
+ },
+
+ _ => throw new ArgumentOutOfRangeException(nameof(level))
+ };
+ }
+}
+
+public sealed record ValidationResult
+{
+ public bool IsValid { get; init; }
+ public IReadOnlyList<string> Issues { get; init; } = [];
+
+ public static ValidationResult Valid() => new() { IsValid = true };
+ public static ValidationResult Invalid(IReadOnlyList<string> issues) =>
+ new() { IsValid = false, Issues = issues };
+}
+
+public sealed record ReplayRequirements
+{
+ public bool RequiresNetwork { get; init; }
+ public bool RequiresLocalStore { get; init; }
+ public bool RequiresTrustBundle { get; init; }
+ public required string Description { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SnapshotLevelHandler.cs` created
+- [ ] Default options per level defined
+- [ ] Export validation per level
+- [ ] Replay requirements documented
+- [ ] Sealed requires signature
+- [ ] Portable requires bundled sources
+
+---
+
+### T5: Integrate with CLI
+
+**Assignee**: CLI Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Add CLI commands for snapshot export and import.
+
+**Implementation Path**: `src/Cli/StellaOps.Cli/Commands/SnapshotCommand.cs`
+
+**Implementation**:
+```csharp
+namespace StellaOps.Cli.Commands;
+
+[Command("snapshot", Description = "Manage knowledge snapshots")]
+public class SnapshotCommand
+{
+ [Command("export", Description = "Export a snapshot to a portable bundle")]
+ public class ExportCommand : ICommand
+ {
+ [Argument(0, Description = "Snapshot ID to export")]
+ public required string SnapshotId { get; set; }
+
+ [Option("-o|--output", Description = "Output file path")]
+ public string? OutputPath { get; set; }
+
+ [Option("-l|--level", Description = "Inclusion level: reference, portable, sealed")]
+ public string Level { get; set; } = "portable";
+
+ [Option("--no-compress", Description = "Disable source compression")]
+ public bool NoCompress { get; set; }
+
+ [Option("--description", Description = "Bundle description")]
+ public string? Description { get; set; }
+
+ private readonly IExportSnapshotService _exportService;
+ private readonly IConsole _console;
+
+ public async Task ExecuteAsync(CancellationToken ct)
+ {
+ var inclusionLevel = Level.ToLowerInvariant() switch
+ {
+ "reference" => SnapshotInclusionLevel.ReferenceOnly,
+ "portable" => SnapshotInclusionLevel.Portable,
+ "sealed" => SnapshotInclusionLevel.Sealed,
+ _ => throw new ArgumentException($"Unknown level: {Level}")
+ };
+
+ var options = new ExportOptions
+ {
+ InclusionLevel = inclusionLevel,
+ CompressSources = !NoCompress,
+ OutputPath = OutputPath,
+ Description = Description
+ };
+
+ _console.WriteLine($"Exporting snapshot {SnapshotId} as {Level}...");
+
+ var result = await _exportService.ExportAsync(SnapshotId, options, ct);
+
+ if (result.IsSuccess)
+ {
+ _console.WriteLine($"Exported to: {result.FilePath}");
+ _console.WriteLine($"Bundle size: {FormatSize(result.BundleInfo!.TotalSizeBytes)}");
+ _console.WriteLine($"Files: {result.BundleInfo.FileCount}");
+ }
+ else
+ {
+ _console.WriteError($"Export failed: {result.Error}");
+ }
+ }
+
+ private static string FormatSize(long bytes)
+ {
+ string[] sizes = { "B", "KB", "MB", "GB" };
+ int order = 0;
+ double size = bytes;
+ while (size >= 1024 && order < sizes.Length - 1)
+ {
+ order++;
+ size /= 1024;
+ }
+ return $"{size:0.##} {sizes[order]}";
+ }
+ }
+
+ [Command("import", Description = "Import a snapshot bundle")]
+ public class ImportCommand : ICommand
+ {
+ [Argument(0, Description = "Path to bundle ZIP file")]
+ public required string BundlePath { get; set; }
+
+ [Option("--no-verify", Description = "Skip checksum and signature verification")]
+ public bool NoVerify { get; set; }
+
+ [Option("--overwrite", Description = "Overwrite existing snapshot")]
+ public bool Overwrite { get; set; }
+
+ private readonly IImportSnapshotService _importService;
+ private readonly IConsole _console;
+
+ public async Task ExecuteAsync(CancellationToken ct)
+ {
+ var options = new ImportOptions
+ {
+ VerifyChecksums = !NoVerify,
+ VerifySignature = !NoVerify,
+ OverwriteExisting = Overwrite
+ };
+
+ _console.WriteLine($"Importing bundle from {BundlePath}...");
+
+ var result = await _importService.ImportAsync(BundlePath, options, ct);
+
+ if (result.IsSuccess)
+ {
+ _console.WriteLine($"Imported snapshot: {result.Manifest!.SnapshotId}");
+ _console.WriteLine($"Sources imported: {result.ImportedSourceCount}");
+ }
+ else
+ {
+ _console.WriteError($"Import failed: {result.Error}");
+ }
+ }
+ }
+
+ [Command("list", Description = "List available snapshots")]
+ public class ListCommand : ICommand
+ {
+ [Option("--format", Description = "Output format: table, json")]
+ public string Format { get; set; } = "table";
+
+ private readonly ISnapshotStore _store;
+ private readonly IConsole _console;
+
+ public async Task ExecuteAsync(CancellationToken ct)
+ {
+ var snapshots = await _store.ListAsync(ct);
+
+ if (Format == "json")
+ {
+ var json = JsonSerializer.Serialize(snapshots, new JsonSerializerOptions { WriteIndented = true });
+ _console.WriteLine(json);
+ }
+ else
+ {
+ _console.WriteLine("ID Created Sources");
+ _console.WriteLine("------------------------------------------ ------------------- -------");
+ foreach (var s in snapshots)
+ {
+ _console.WriteLine($"{s.SnapshotId,-42} {s.CreatedAt:yyyy-MM-dd HH:mm} {s.Sources.Count,7}");
+ }
+ }
+ }
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `stella snapshot export <snapshot-id>` command works
+- [ ] `stella snapshot import <bundle-path>` command works
+- [ ] `stella snapshot list` command works
+- [ ] Level selection with `--level`
+- [ ] Verification toggle with `--no-verify`
+- [ ] Overwrite option with `--overwrite`
+- [ ] Size and file count reported
+
+---
+
+### T6: Add Air-Gap Tests
+
+**Assignee**: ExportCenter Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T2, T3
+
+**Description**:
+Add tests verifying offline replay with exported bundles.
+
+**Implementation Path**: `src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/Snapshots/`
+
+**Test Cases**:
+```csharp
+public class ExportSnapshotServiceTests
+{
+ [Fact]
+ public async Task Export_PortableLevel_IncludesSources()
+ {
+ // Arrange
+ var snapshot = await CreateSnapshotWithSourcesAsync();
+ var options = new ExportOptions { InclusionLevel = SnapshotInclusionLevel.Portable };
+
+ // Act
+ var result = await _exportService.ExportAsync(snapshot.SnapshotId, options);
+
+ // Assert
+ result.IsSuccess.Should().BeTrue();
+ File.Exists(result.FilePath).Should().BeTrue();
+
+ using var zip = ZipFile.OpenRead(result.FilePath);
+ zip.Entries.Should().Contain(e => e.FullName.StartsWith("sources/"));
+ zip.Entries.Should().Contain(e => e.Name == "manifest.json");
+ }
+
+ [Fact]
+ public async Task Export_ReferenceLevel_ExcludesSources()
+ {
+ var snapshot = await CreateSnapshotWithSourcesAsync();
+ var options = new ExportOptions { InclusionLevel = SnapshotInclusionLevel.ReferenceOnly };
+
+ var result = await _exportService.ExportAsync(snapshot.SnapshotId, options);
+
+ using var zip = ZipFile.OpenRead(result.FilePath);
+ zip.Entries.Should().NotContain(e => e.FullName.StartsWith("sources/"));
+ }
+
+ [Fact]
+ public async Task Export_GeneratesValidChecksums()
+ {
+ var snapshot = await CreateSnapshotWithSourcesAsync();
+ var result = await _exportService.ExportAsync(snapshot.SnapshotId, new ExportOptions());
+
+ using var zip = ZipFile.OpenRead(result.FilePath);
+ var checksumsEntry = zip.GetEntry("META/CHECKSUMS.sha256");
+ checksumsEntry.Should().NotBeNull();
+ }
+}
+
+public class ImportSnapshotServiceTests
+{
+ [Fact]
+ public async Task Import_ValidBundle_Succeeds()
+ {
+ // Arrange
+ var bundlePath = await CreateTestBundleAsync();
+
+ // Act
+ var result = await _importService.ImportAsync(bundlePath, new ImportOptions());
+
+ // Assert
+ result.IsSuccess.Should().BeTrue();
+ result.Manifest.Should().NotBeNull();
+ }
+
+ [Fact]
+ public async Task Import_TamperedFile_FailsVerification()
+ {
+ var bundlePath = await CreateTestBundleAsync();
+ await TamperWithBundleAsync(bundlePath);
+
+ var result = await _importService.ImportAsync(bundlePath, new ImportOptions { VerifyChecksums = true });
+
+ result.IsSuccess.Should().BeFalse();
+ result.Error.Should().Contain("Checksum");
+ }
+
+ [Fact]
+ public async Task Import_ExistingSnapshot_FailsWithoutOverwrite()
+ {
+ var bundlePath = await CreateTestBundleAsync();
+ await _importService.ImportAsync(bundlePath, new ImportOptions());
+
+ var result = await _importService.ImportAsync(bundlePath, new ImportOptions { OverwriteExisting = false });
+
+ result.IsSuccess.Should().BeFalse();
+ result.Error.Should().Contain("already exists");
+ }
+
+ [Fact]
+ public async Task Import_ExistingSnapshot_SucceedsWithOverwrite()
+ {
+ var bundlePath = await CreateTestBundleAsync();
+ await _importService.ImportAsync(bundlePath, new ImportOptions());
+
+ var result = await _importService.ImportAsync(bundlePath, new ImportOptions { OverwriteExisting = true });
+
+ result.IsSuccess.Should().BeTrue();
+ }
+}
+
+public class AirGapReplayTests
+{
+ [Fact]
+ public async Task FullAirGapWorkflow_ExportImportReplay()
+ {
+ // Step 1: Create snapshot with evaluation
+ var snapshot = await _snapshotService.CaptureCurrentSnapshotAsync();
+ var originalVerdict = await _evaluator.EvaluateWithSnapshotAsync(CreateRequest(), snapshot);
+
+ // Step 2: Export to portable bundle
+ var exportResult = await _exportService.ExportAsync(snapshot.SnapshotId,
+ new ExportOptions { InclusionLevel = SnapshotInclusionLevel.Portable });
+ exportResult.IsSuccess.Should().BeTrue();
+
+ // Step 3: Clear local stores (simulate air-gap transfer)
+ await ClearLocalStoresAsync();
+
+ // Step 4: Import bundle (as if on air-gapped system)
+ var importResult = await _importService.ImportAsync(exportResult.FilePath, new ImportOptions());
+ importResult.IsSuccess.Should().BeTrue();
+
+ // Step 5: Replay without network
+ var replayRequest = new ReplayRequest
+ {
+ ArtifactDigest = originalVerdict.ArtifactDigest,
+ SnapshotId = snapshot.SnapshotId,
+ OriginalVerdictId = originalVerdict.VerdictId,
+ Options = new ReplayOptions { AllowNetworkFetch = false }
+ };
+
+ var replayResult = await _replayEngine.ReplayAsync(replayRequest);
+
+ // Assert: Replay matches original
+ replayResult.MatchStatus.Should().Be(ReplayMatchStatus.ExactMatch);
+ }
+
+ [Fact]
+ public async Task AirGap_SealedBundle_VerifiesSignature()
+ {
+ var snapshot = await CreateAndSealSnapshotAsync();
+ var exportResult = await _exportService.ExportAsync(snapshot.SnapshotId,
+ new ExportOptions { InclusionLevel = SnapshotInclusionLevel.Sealed });
+
+ var importResult = await _importService.ImportAsync(exportResult.FilePath,
+ new ImportOptions { VerifySignature = true });
+
+ importResult.IsSuccess.Should().BeTrue();
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Export tests for each inclusion level
+- [ ] Import tests for verification scenarios
+- [ ] Tamper detection test
+- [ ] Overwrite behavior tests
+- [ ] Full air-gap workflow test
+- [ ] Sealed bundle signature verification
+- [ ] All 6+ tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | ExportCenter Team | Define SnapshotBundle format |
+| 2 | T2 | TODO | T1 | ExportCenter Team | Implement ExportSnapshotService |
+| 3 | T3 | TODO | T1 | ExportCenter Team | Implement ImportSnapshotService |
+| 4 | T4 | TODO | T1 | ExportCenter Team | Add snapshot levels |
+| 5 | T5 | TODO | T2, T3 | CLI Team | Integrate with CLI |
+| 6 | T6 | TODO | T2, T3 | ExportCenter Team | Add air-gap tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. Snapshot export/import for air-gap identified as requirement. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| ZIP format | Decision | ExportCenter Team | Standard ZIP for broad compatibility |
+| Gzip compression | Decision | ExportCenter Team | Optional per-source compression |
+| Three inclusion levels | Decision | ExportCenter Team | Reference, Portable, Sealed for flexibility |
+| Temp directory cleanup | Decision | ExportCenter Team | Always cleanup even on failure |
+
+---
+
+## Success Criteria
+
+- [ ] All 6 tasks marked DONE
+- [ ] Export creates valid ZIP bundles
+- [ ] Import verifies checksums and signatures
+- [ ] Full air-gap workflow tested
+- [ ] CLI commands work
+- [ ] 6+ tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0003_0001_risk_verdict_attestation.md b/docs/implplan/SPRINT_4100_0003_0001_risk_verdict_attestation.md
new file mode 100644
index 000000000..57ee47062
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0003_0001_risk_verdict_attestation.md
@@ -0,0 +1,1325 @@
+# Sprint 4100.0003.0001 · Risk Verdict Attestation Contract
+
+## Topic & Scope
+
+- Define formal Risk Verdict Attestation (RVA) contract
+- Support PASS/FAIL/PASS_WITH_EXCEPTIONS/INDETERMINATE outcomes
+- Enable cryptographically signed, replayable verdicts
+
+**Working directory:** `src/Policy/StellaOps.Policy.Engine/Attestation/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: None (first sprint in batch)
+- **Downstream**: Sprint 4100.0003.0002 (OCI Referrer Push)
+- **Safe to parallelize with**: Sprint 4100.0001.0001, Sprint 4100.0002.0001, Sprint 4100.0004.0002
+
+## Documentation Prerequisites
+
+- `src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/AGENTS.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #2.md` (Risk Verdict Attestation)
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Guidelines for Product and Development Managers - Signed, Replayable Risk Verdicts.md`
+
+---
+
+## Tasks
+
+### T1: Define RiskVerdictAttestation Model
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create the formal RVA model with all required fields for signed verdicts.
+
+**Implementation Path**: `Attestation/RiskVerdictAttestation.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Engine.Attestation;
+
+///
+/// Risk Verdict Attestation - the signed, replayable output of policy evaluation.
+/// This is the formal contract for communicating risk decisions.
+///
+public sealed record RiskVerdictAttestation
+{
+ ///
+ /// Unique identifier for this attestation.
+ /// Format: rva:{sha256-of-content}
+ ///
+ public required string AttestationId { get; init; }
+
+ ///
+ /// Schema version for forward compatibility.
+ ///
+ public string SchemaVersion { get; init; } = "1.0";
+
+ ///
+ /// When this attestation was created.
+ ///
+ public required DateTimeOffset CreatedAt { get; init; }
+
+ ///
+ /// The final verdict status.
+ ///
+ public required RiskVerdictStatus Verdict { get; init; }
+
+ ///
+ /// Subject artifact being evaluated.
+ ///
+ public required ArtifactSubject Subject { get; init; }
+
+ ///
+ /// Reference to the policy that was evaluated.
+ ///
+ public required PolicyRef Policy { get; init; }
+
+ ///
+ /// Reference to the knowledge snapshot used.
+ /// Enables replay with frozen inputs.
+ ///
+ public required string KnowledgeSnapshotId { get; init; }
+
+ ///
+ /// Evidence references supporting the verdict.
+ ///
+    public IReadOnlyList<EvidenceRef> Evidence { get; init; } = [];
+
+ ///
+ /// Reason codes explaining the verdict.
+ ///
+    public IReadOnlyList<VerdictReasonCode> ReasonCodes { get; init; } = [];
+
+ ///
+ /// Summary of unknowns encountered.
+ ///
+ public UnknownsSummary? Unknowns { get; init; }
+
+ ///
+ /// Exception IDs that were applied.
+ ///
+    public IReadOnlyList<string> AppliedExceptions { get; init; } = [];
+
+ ///
+ /// Human-readable explanation of the verdict.
+ ///
+ public string? Explanation { get; init; }
+
+ ///
+ /// Expiration time for this verdict (optional).
+ ///
+ public DateTimeOffset? ExpiresAt { get; init; }
+
+ ///
+ /// Metadata for extensibility.
+ ///
+    public IReadOnlyDictionary<string, string> Metadata { get; init; }
+        = new Dictionary<string, string>();
+}
+
+///
+/// The four possible verdict outcomes.
+///
+public enum RiskVerdictStatus
+{
+ ///
+ /// No policy violations detected. Safe to proceed.
+ ///
+ Pass,
+
+ ///
+ /// Policy violations detected. Block deployment.
+ ///
+ Fail,
+
+ ///
+ /// Violations exist but are covered by approved exceptions.
+ ///
+ PassWithExceptions,
+
+ ///
+ /// Cannot determine risk due to insufficient data.
+ ///
+ Indeterminate
+}
+
+///
+/// The artifact being evaluated.
+///
+public sealed record ArtifactSubject
+{
+ ///
+ /// Artifact digest (sha256:...).
+ ///
+ public required string Digest { get; init; }
+
+ ///
+ /// Artifact type: container-image, sbom, binary, etc.
+ ///
+ public required string Type { get; init; }
+
+ ///
+ /// Human-readable name (e.g., image:tag).
+ ///
+ public string? Name { get; init; }
+
+ ///
+ /// Registry or repository URI.
+ ///
+ public string? Uri { get; init; }
+}
+
+///
+/// Reference to the evaluated policy.
+///
+public sealed record PolicyRef
+{
+ public required string PolicyId { get; init; }
+ public required string Version { get; init; }
+ public required string Digest { get; init; }
+ public string? Uri { get; init; }
+}
+
+///
+/// Reference to evidence supporting the verdict.
+///
+public sealed record EvidenceRef
+{
+ public required string Type { get; init; }
+ public required string Digest { get; init; }
+ public string? Uri { get; init; }
+ public string? Description { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RiskVerdictAttestation.cs` created with all models
+- [ ] Four verdict statuses: Pass, Fail, PassWithExceptions, Indeterminate
+- [ ] Subject, Policy, Snapshot references included
+- [ ] Evidence references for audit trail
+- [ ] Expiration support for time-limited verdicts
+- [ ] Metadata for extensibility
+
+---
+
+### T2: Define VerdictReasonCode Enum
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create enumeration of structured reason codes for verdicts.
+
+**Implementation Path**: `Attestation/VerdictReasonCode.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Engine.Attestation;
+
+///
+/// Structured reason codes explaining verdict outcomes.
+/// Format: CATEGORY.SUBCATEGORY.DETAIL
+///
+public enum VerdictReasonCode
+{
+ // PASS reasons
+ ///
+ /// No CVEs found in artifact.
+ ///
+ PassNoCves,
+
+ ///
+ /// All CVEs are not reachable.
+ ///
+ PassNotReachable,
+
+ ///
+ /// All CVEs are covered by VEX not_affected statements.
+ ///
+ PassVexNotAffected,
+
+ ///
+ /// All CVEs are below severity threshold.
+ ///
+ PassBelowThreshold,
+
+ // FAIL reasons - CVE
+ ///
+ /// Reachable CVE exceeds severity threshold.
+ ///
+ FailCveReachable,
+
+ ///
+ /// CVE in CISA KEV (Known Exploited Vulnerabilities).
+ ///
+ FailCveKev,
+
+ ///
+ /// CVE with high EPSS score.
+ ///
+ FailCveEpss,
+
+ ///
+ /// CVE severity exceeds maximum allowed.
+ ///
+ FailCveSeverity,
+
+ // FAIL reasons - Policy
+ ///
+ /// License violation detected.
+ ///
+ FailPolicyLicense,
+
+ ///
+ /// Blocked package detected.
+ ///
+ FailPolicyBlockedPackage,
+
+ ///
+ /// Unknown budget exceeded.
+ ///
+ FailPolicyUnknownBudget,
+
+ ///
+ /// SBOM completeness below threshold.
+ ///
+ FailPolicySbomCompleteness,
+
+ // FAIL reasons - Provenance
+ ///
+ /// Missing provenance attestation.
+ ///
+ FailProvenanceMissing,
+
+ ///
+ /// Provenance signature invalid.
+ ///
+ FailProvenanceInvalid,
+
+ // EXCEPTION reasons
+ ///
+ /// CVE covered by approved exception.
+ ///
+ ExceptionCve,
+
+ ///
+ /// License covered by approved exception.
+ ///
+ ExceptionLicense,
+
+ ///
+ /// Unknowns covered by approved exception.
+ ///
+ ExceptionUnknown,
+
+ // INDETERMINATE reasons
+ ///
+ /// Insufficient data to evaluate.
+ ///
+ IndeterminateInsufficientData,
+
+ ///
+ /// Analyzer does not support this artifact type.
+ ///
+ IndeterminateUnsupported,
+
+ ///
+ /// Conflicting VEX statements.
+ ///
+ IndeterminateVexConflict,
+
+ ///
+ /// Required knowledge source unavailable.
+ ///
+ IndeterminateFeedUnavailable
+}
+
+///
+/// Extension methods for reason code handling.
+///
+public static class VerdictReasonCodeExtensions
+{
+ ///
+ /// Gets the category of a reason code (Pass, Fail, Exception, Indeterminate).
+ ///
+ public static string GetCategory(this VerdictReasonCode code)
+ {
+ return code.ToString() switch
+ {
+ var s when s.StartsWith("Pass") => "Pass",
+ var s when s.StartsWith("Fail") => "Fail",
+ var s when s.StartsWith("Exception") => "Exception",
+ var s when s.StartsWith("Indeterminate") => "Indeterminate",
+ _ => "Unknown"
+ };
+ }
+
+ ///
+ /// Gets a human-readable description of the reason code.
+ ///
+ public static string GetDescription(this VerdictReasonCode code)
+ {
+ return code switch
+ {
+ VerdictReasonCode.PassNoCves => "No CVEs found in artifact",
+ VerdictReasonCode.PassNotReachable => "All CVEs are not reachable",
+ VerdictReasonCode.FailCveReachable => "Reachable CVE exceeds severity threshold",
+ VerdictReasonCode.FailCveKev => "CVE in CISA Known Exploited Vulnerabilities list",
+ VerdictReasonCode.FailPolicyUnknownBudget => "Unknown budget exceeded",
+ VerdictReasonCode.IndeterminateInsufficientData => "Insufficient data to evaluate",
+ _ => code.ToString()
+ };
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `VerdictReasonCode.cs` created with all codes
+- [ ] Codes organized by category (Pass, Fail, Exception, Indeterminate)
+- [ ] CVE, Policy, Provenance failure categories
+- [ ] Extension methods for category and description
+- [ ] XML documentation on all codes
+
+---
+
+### T3: Create RvaBuilder
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Create fluent builder for constructing RVA instances.
+
+**Implementation Path**: `Attestation/RvaBuilder.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Engine.Attestation;
+
+///
+/// Fluent builder for constructing Risk Verdict Attestations.
+///
+public sealed class RvaBuilder
+{
+ private RiskVerdictStatus _verdict;
+ private ArtifactSubject? _subject;
+ private PolicyRef? _policy;
+ private string? _snapshotId;
+    private readonly List<EvidenceRef> _evidence = [];
+    private readonly List<VerdictReasonCode> _reasonCodes = [];
+    private readonly List<string> _exceptions = [];
+    private UnknownsSummary? _unknowns;
+    private string? _explanation;
+    private DateTimeOffset? _expiresAt;
+    private readonly Dictionary<string, string> _metadata = [];
+ private readonly IHasher _hasher;
+
+ public RvaBuilder(IHasher hasher)
+ {
+ _hasher = hasher;
+ }
+
+ public RvaBuilder WithVerdict(RiskVerdictStatus verdict)
+ {
+ _verdict = verdict;
+ return this;
+ }
+
+ public RvaBuilder WithSubject(string digest, string type, string? name = null, string? uri = null)
+ {
+ _subject = new ArtifactSubject
+ {
+ Digest = digest,
+ Type = type,
+ Name = name,
+ Uri = uri
+ };
+ return this;
+ }
+
+ public RvaBuilder WithPolicy(string policyId, string version, string digest, string? uri = null)
+ {
+ _policy = new PolicyRef
+ {
+ PolicyId = policyId,
+ Version = version,
+ Digest = digest,
+ Uri = uri
+ };
+ return this;
+ }
+
+ public RvaBuilder WithKnowledgeSnapshot(string snapshotId)
+ {
+ _snapshotId = snapshotId;
+ return this;
+ }
+
+ public RvaBuilder WithEvidence(string type, string digest, string? uri = null, string? description = null)
+ {
+ _evidence.Add(new EvidenceRef
+ {
+ Type = type,
+ Digest = digest,
+ Uri = uri,
+ Description = description
+ });
+ return this;
+ }
+
+ public RvaBuilder WithReasonCode(VerdictReasonCode code)
+ {
+ if (!_reasonCodes.Contains(code))
+ _reasonCodes.Add(code);
+ return this;
+ }
+
+    public RvaBuilder WithReasonCodes(IEnumerable<VerdictReasonCode> codes)
+ {
+ foreach (var code in codes)
+ WithReasonCode(code);
+ return this;
+ }
+
+ public RvaBuilder WithException(string exceptionId)
+ {
+ _exceptions.Add(exceptionId);
+ return this;
+ }
+
+ public RvaBuilder WithUnknowns(UnknownsSummary unknowns)
+ {
+ _unknowns = unknowns;
+ return this;
+ }
+
+ public RvaBuilder WithExplanation(string explanation)
+ {
+ _explanation = explanation;
+ return this;
+ }
+
+ public RvaBuilder WithExpiration(DateTimeOffset expiresAt)
+ {
+ _expiresAt = expiresAt;
+ return this;
+ }
+
+ public RvaBuilder WithMetadata(string key, string value)
+ {
+ _metadata[key] = value;
+ return this;
+ }
+
+ ///
+ /// Builds the RVA from a policy evaluation result.
+ ///
+ public RvaBuilder FromEvaluationResult(PolicyEvaluationResult result)
+ {
+ _verdict = MapDecision(result.Decision);
+ _subject = new ArtifactSubject
+ {
+ Digest = result.ArtifactDigest,
+ Type = "container-image",
+ Name = result.ArtifactName
+ };
+ _snapshotId = result.KnowledgeSnapshotId;
+ _unknowns = result.UnknownsSummary;
+
+ foreach (var exc in result.AppliedExceptions)
+ _exceptions.Add(exc);
+
+ // Derive reason codes from findings
+ foreach (var finding in result.Findings.Where(f => f.IsBlocking))
+ {
+ var code = DeriveReasonCode(finding);
+ WithReasonCode(code);
+ }
+
+ return this;
+ }
+
+ public RiskVerdictAttestation Build()
+ {
+ if (_subject is null)
+ throw new InvalidOperationException("Subject is required");
+ if (_policy is null)
+ throw new InvalidOperationException("Policy is required");
+ if (_snapshotId is null)
+ throw new InvalidOperationException("Knowledge snapshot ID is required");
+
+ var attestation = new RiskVerdictAttestation
+ {
+ AttestationId = "", // Computed below
+ CreatedAt = DateTimeOffset.UtcNow,
+ Verdict = _verdict,
+ Subject = _subject,
+ Policy = _policy,
+ KnowledgeSnapshotId = _snapshotId,
+ Evidence = _evidence.ToList(),
+ ReasonCodes = _reasonCodes.ToList(),
+ AppliedExceptions = _exceptions.ToList(),
+ Unknowns = _unknowns,
+ Explanation = _explanation ?? GenerateExplanation(),
+ ExpiresAt = _expiresAt,
+ Metadata = _metadata.ToDictionary()
+ };
+
+ // Compute content-addressed ID
+ var attestationId = ComputeAttestationId(attestation);
+
+ return attestation with { AttestationId = attestationId };
+ }
+
+ private string ComputeAttestationId(RiskVerdictAttestation attestation)
+ {
+ var json = JsonSerializer.Serialize(attestation with { AttestationId = "" },
+ new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ WriteIndented = false
+ });
+
+ var hash = _hasher.ComputeSha256(json);
+ return $"rva:sha256:{hash}";
+ }
+
+ private static RiskVerdictStatus MapDecision(PolicyDecision decision)
+ {
+ return decision switch
+ {
+ PolicyDecision.Pass => RiskVerdictStatus.Pass,
+ PolicyDecision.Fail => RiskVerdictStatus.Fail,
+ PolicyDecision.PassWithExceptions => RiskVerdictStatus.PassWithExceptions,
+ PolicyDecision.Indeterminate => RiskVerdictStatus.Indeterminate,
+ _ => RiskVerdictStatus.Indeterminate
+ };
+ }
+
+ private VerdictReasonCode DeriveReasonCode(Finding finding)
+ {
+ return finding.Type switch
+ {
+ "cve" when finding.IsReachable == true => VerdictReasonCode.FailCveReachable,
+ "cve" when finding.IsInKev == true => VerdictReasonCode.FailCveKev,
+ "license" => VerdictReasonCode.FailPolicyLicense,
+ "blocked-package" => VerdictReasonCode.FailPolicyBlockedPackage,
+ "unknown-budget" => VerdictReasonCode.FailPolicyUnknownBudget,
+ _ => VerdictReasonCode.FailCveSeverity
+ };
+ }
+
+ private string GenerateExplanation()
+ {
+ if (_reasonCodes.Count == 0)
+ return $"Verdict: {_verdict}";
+
+ var reasons = string.Join(", ", _reasonCodes.Take(3).Select(c => c.GetDescription()));
+ return $"Verdict: {_verdict}. Reasons: {reasons}";
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RvaBuilder.cs` created with fluent API
+- [ ] `FromEvaluationResult` for easy conversion
+- [ ] Content-addressed attestation ID computed
+- [ ] Auto-generated explanation
+- [ ] Reason code derivation from findings
+- [ ] Validation on Build()
+
+---
+
+### T4: Integrate Knowledge Snapshot Reference
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T3
+
+**Description**:
+Ensure RVA includes knowledge snapshot reference for replay.
+
+**Implementation Path**: `Attestation/RvaBuilder.cs`, `Services/PolicyEvaluator.cs`
+
+**Integration**:
+```csharp
+// In PolicyEvaluator after evaluation
+public async Task<RiskVerdictAttestation> CreateAttestationAsync(
+ PolicyEvaluationResult result,
+ CancellationToken ct = default)
+{
+ // Ensure snapshot exists
+ if (result.KnowledgeSnapshotId is null)
+ {
+ throw new InvalidOperationException("Evaluation must have knowledge snapshot for attestation");
+ }
+
+ var attestation = new RvaBuilder(_hasher)
+ .FromEvaluationResult(result)
+ .WithPolicy(_policyRef.Id, _policyRef.Version, _policyRef.Digest)
+ .WithEvidence("sbom", result.SbomDigest, description: "SBOM used for analysis")
+ .WithEvidence("reachability", result.ReachabilityDigest, description: "Call graph analysis")
+ .Build();
+
+ // Log for observability
+ _logger.LogInformation(
+ "Created RVA {AttestationId} with verdict {Verdict} for {Artifact}",
+ attestation.AttestationId, attestation.Verdict, result.ArtifactDigest);
+
+ // Store attestation
+ await _attestationStore.SaveAsync(attestation, ct);
+
+ return attestation;
+}
+```
+
+**Replay Support**:
+```csharp
+///
+/// Validates that an RVA can be replayed.
+///
+public async Task<ReplayValidation> ValidateForReplayAsync(
+ RiskVerdictAttestation attestation,
+ CancellationToken ct = default)
+{
+ // Check snapshot exists
+ var snapshot = await _snapshotService.GetSnapshotAsync(attestation.KnowledgeSnapshotId, ct);
+ if (snapshot is null)
+ {
+ return ReplayValidation.Fail("Knowledge snapshot not found");
+ }
+
+ // Check snapshot integrity
+ var verification = await _snapshotService.VerifySnapshotAsync(snapshot, ct);
+ if (!verification.IsValid)
+ {
+ return ReplayValidation.Fail($"Snapshot verification failed: {verification.Error}");
+ }
+
+ return ReplayValidation.Success(snapshot);
+}
+
+public sealed record ReplayValidation(bool CanReplay, string? Error, KnowledgeSnapshotManifest? Snapshot)
+{
+ public static ReplayValidation Success(KnowledgeSnapshotManifest snapshot) =>
+ new(true, null, snapshot);
+ public static ReplayValidation Fail(string error) =>
+ new(false, error, null);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] RVA always includes KnowledgeSnapshotId
+- [ ] Evaluation without snapshot throws
+- [ ] Evidence references added (SBOM, reachability)
+- [ ] Replay validation method added
+- [ ] Attestation stored after creation
+
+---
+
+### T5: Update Predicate Type
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Define the in-toto predicate type for RVA.
+
+**Implementation Path**: `Attestation/RvaPredicate.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Engine.Attestation;
+
+///
+/// In-toto predicate wrapper for Risk Verdict Attestations.
+///
+public sealed class RvaPredicate
+{
+ ///
+ /// Predicate type URI for RVA.
+ ///
+ public const string PredicateType = "https://stella.ops/predicates/risk-verdict@v1";
+
+ ///
+ /// Creates an in-toto statement from an RVA.
+ ///
+ public static InTotoStatement CreateStatement(RiskVerdictAttestation attestation)
+ {
+ return new InTotoStatement
+ {
+ Type = "https://in-toto.io/Statement/v1",
+ Subject = new[]
+ {
+ new InTotoSubject
+ {
+ Name = attestation.Subject.Name ?? attestation.Subject.Digest,
+                    Digest = new Dictionary<string, string>
+ {
+ ["sha256"] = attestation.Subject.Digest.Replace("sha256:", "")
+ }
+ }
+ },
+ PredicateType = PredicateType,
+ Predicate = new RvaPredicateContent
+ {
+ AttestationId = attestation.AttestationId,
+ Verdict = attestation.Verdict.ToString(),
+ Policy = new PolicyPredicateRef
+ {
+ Id = attestation.Policy.PolicyId,
+ Version = attestation.Policy.Version,
+ Digest = attestation.Policy.Digest
+ },
+ KnowledgeSnapshotId = attestation.KnowledgeSnapshotId,
+ ReasonCodes = attestation.ReasonCodes.Select(c => c.ToString()).ToList(),
+ Unknowns = attestation.Unknowns is not null ? new UnknownsPredicateRef
+ {
+ Total = attestation.Unknowns.Total,
+ BlockingCount = attestation.Unknowns.BlockingCount
+ } : null,
+ AppliedExceptions = attestation.AppliedExceptions.ToList(),
+ Explanation = attestation.Explanation,
+ CreatedAt = attestation.CreatedAt.ToString("o"),
+ ExpiresAt = attestation.ExpiresAt?.ToString("o")
+ }
+ };
+ }
+}
+
+public sealed record InTotoStatement
+{
+ [JsonPropertyName("_type")]
+ public required string Type { get; init; }
+
+ [JsonPropertyName("subject")]
+ public required InTotoSubject[] Subject { get; init; }
+
+ [JsonPropertyName("predicateType")]
+ public required string PredicateType { get; init; }
+
+ [JsonPropertyName("predicate")]
+ public required object Predicate { get; init; }
+}
+
+public sealed record InTotoSubject
+{
+ [JsonPropertyName("name")]
+ public required string Name { get; init; }
+
+ [JsonPropertyName("digest")]
+    public required Dictionary<string, string> Digest { get; init; }
+}
+
+public sealed record RvaPredicateContent
+{
+ [JsonPropertyName("attestationId")]
+ public required string AttestationId { get; init; }
+
+ [JsonPropertyName("verdict")]
+ public required string Verdict { get; init; }
+
+ [JsonPropertyName("policy")]
+ public required PolicyPredicateRef Policy { get; init; }
+
+ [JsonPropertyName("knowledgeSnapshotId")]
+ public required string KnowledgeSnapshotId { get; init; }
+
+ [JsonPropertyName("reasonCodes")]
+    public required IReadOnlyList<string> ReasonCodes { get; init; }
+
+ [JsonPropertyName("unknowns")]
+ public UnknownsPredicateRef? Unknowns { get; init; }
+
+ [JsonPropertyName("appliedExceptions")]
+    public required IReadOnlyList<string> AppliedExceptions { get; init; }
+
+ [JsonPropertyName("explanation")]
+ public string? Explanation { get; init; }
+
+ [JsonPropertyName("createdAt")]
+ public required string CreatedAt { get; init; }
+
+ [JsonPropertyName("expiresAt")]
+ public string? ExpiresAt { get; init; }
+}
+
+public sealed record PolicyPredicateRef
+{
+ [JsonPropertyName("id")]
+ public required string Id { get; init; }
+
+ [JsonPropertyName("version")]
+ public required string Version { get; init; }
+
+ [JsonPropertyName("digest")]
+ public required string Digest { get; init; }
+}
+
+public sealed record UnknownsPredicateRef
+{
+ [JsonPropertyName("total")]
+ public int Total { get; init; }
+
+ [JsonPropertyName("blockingCount")]
+ public int BlockingCount { get; init; }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RvaPredicate.cs` created
+- [ ] Predicate type: `https://stella.ops/predicates/risk-verdict@v1`
+- [ ] In-toto statement structure correct
+- [ ] All RVA fields mapped to predicate
+- [ ] JSON property names in camelCase
+
+---
+
+### T6: Create RvaVerifier
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1, T5
+
+**Description**:
+Implement verification of RVA signatures and integrity.
+
+**Implementation Path**: `Attestation/RvaVerifier.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Engine.Attestation;
+
+///
+/// Verifies Risk Verdict Attestation signatures and integrity.
+///
+public sealed class RvaVerifier : IRvaVerifier
+{
+ private readonly ISigner _signer;
+ private readonly ISnapshotService _snapshotService;
+ private readonly ITrustStore _trustStore;
+    private readonly ILogger<RvaVerifier> _logger;
+
+    public RvaVerifier(
+        ISigner signer,
+        ISnapshotService snapshotService,
+        ITrustStore trustStore,
+        ILogger<RvaVerifier> logger)
+ {
+ _signer = signer;
+ _snapshotService = snapshotService;
+ _trustStore = trustStore;
+ _logger = logger;
+ }
+
+ ///
+ /// Verifies a DSSE-wrapped RVA.
+ ///
+    public async Task<RvaVerificationResult> VerifyAsync(
+ DsseEnvelope envelope,
+ RvaVerificationOptions options,
+ CancellationToken ct = default)
+ {
+        var issues = new List<string>();
+
+ // Step 1: Verify DSSE signature
+ var sigResult = await VerifySignatureAsync(envelope, options, ct);
+ if (!sigResult.IsValid)
+ {
+ issues.Add($"Signature verification failed: {sigResult.Error}");
+ if (!options.ContinueOnSignatureFailure)
+ {
+ return RvaVerificationResult.Fail(issues);
+ }
+ }
+
+ // Step 2: Parse payload
+ var attestation = ParsePayload(envelope);
+ if (attestation is null)
+ {
+ issues.Add("Failed to parse RVA payload");
+ return RvaVerificationResult.Fail(issues);
+ }
+
+ // Step 3: Verify content-addressed ID
+ var idValid = VerifyAttestationId(attestation);
+ if (!idValid)
+ {
+ issues.Add("Attestation ID does not match content");
+ return RvaVerificationResult.Fail(issues);
+ }
+
+ // Step 4: Verify expiration
+ if (options.CheckExpiration && attestation.ExpiresAt.HasValue)
+ {
+ if (attestation.ExpiresAt.Value < DateTimeOffset.UtcNow)
+ {
+ issues.Add($"Attestation expired at {attestation.ExpiresAt.Value:o}");
+ if (!options.AllowExpired)
+ {
+ return RvaVerificationResult.Fail(issues);
+ }
+ }
+ }
+
+ // Step 5: Verify knowledge snapshot exists (if requested)
+ if (options.VerifySnapshotExists)
+ {
+ var snapshot = await _snapshotService.GetSnapshotAsync(attestation.KnowledgeSnapshotId, ct);
+ if (snapshot is null)
+ {
+ issues.Add($"Knowledge snapshot {attestation.KnowledgeSnapshotId} not found");
+ }
+ }
+
+ // Step 6: Verify signer identity against trust store
+ if (options.VerifySignerIdentity && sigResult.SignerIdentity is not null)
+ {
+ var trusted = await _trustStore.IsTrustedSignerAsync(sigResult.SignerIdentity, ct);
+ if (!trusted)
+ {
+ issues.Add($"Signer {sigResult.SignerIdentity} is not in trust store");
+ }
+ }
+
+        var isValid = issues.Count == 0 ||
+            (options.AllowExpired && issues.All(i => i.Contains("expired")));
+
+ return new RvaVerificationResult
+ {
+ IsValid = isValid,
+ Attestation = attestation,
+ SignerIdentity = sigResult.SignerIdentity,
+ Issues = issues,
+ VerifiedAt = DateTimeOffset.UtcNow
+ };
+ }
+
+ ///
+ /// Quick verification of just the signature.
+ ///
+    public async Task<SignatureVerificationResult> VerifySignatureAsync(
+ DsseEnvelope envelope,
+ RvaVerificationOptions options,
+ CancellationToken ct = default)
+ {
+ try
+ {
+ var payload = Convert.FromBase64String(envelope.Payload);
+ var signature = Convert.FromBase64String(envelope.Signatures[0].Sig);
+
+ var isValid = await _signer.VerifyAsync(payload, signature, ct);
+
+ return new SignatureVerificationResult
+ {
+ IsValid = isValid,
+ SignerIdentity = envelope.Signatures[0].KeyId
+ };
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Signature verification failed");
+ return new SignatureVerificationResult
+ {
+ IsValid = false,
+ Error = ex.Message
+ };
+ }
+ }
+
+ private RiskVerdictAttestation? ParsePayload(DsseEnvelope envelope)
+ {
+ try
+ {
+ var payloadBytes = Convert.FromBase64String(envelope.Payload);
+            var statement = JsonSerializer.Deserialize<InTotoStatement>(payloadBytes);
+
+ if (statement?.PredicateType != RvaPredicate.PredicateType)
+ return null;
+
+ var predicateJson = JsonSerializer.Serialize(statement.Predicate);
+            var predicate = JsonSerializer.Deserialize<RvaPredicateContent>(predicateJson);
+
+ // Convert predicate back to RVA (simplified)
+ return ConvertToRva(statement, predicate!);
+ }
+ catch
+ {
+ return null;
+ }
+ }
+
+ private bool VerifyAttestationId(RiskVerdictAttestation attestation)
+ {
+ var json = JsonSerializer.Serialize(attestation with { AttestationId = "" },
+ new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
+ var expectedId = $"rva:sha256:{ComputeSha256(json)}";
+ return attestation.AttestationId == expectedId;
+ }
+
+ private static string ComputeSha256(string input)
+ {
+ var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
+ return Convert.ToHexString(bytes).ToLowerInvariant();
+ }
+}
+
+public sealed record RvaVerificationResult
+{
+ public required bool IsValid { get; init; }
+ public RiskVerdictAttestation? Attestation { get; init; }
+ public string? SignerIdentity { get; init; }
+ public IReadOnlyList<string> Issues { get; init; } = [];
+ public DateTimeOffset VerifiedAt { get; init; }
+
+ public static RvaVerificationResult Fail(IReadOnlyList<string> issues) =>
+ new() { IsValid = false, Issues = issues, VerifiedAt = DateTimeOffset.UtcNow };
+}
+
+public sealed record SignatureVerificationResult
+{
+ public required bool IsValid { get; init; }
+ public string? SignerIdentity { get; init; }
+ public string? Error { get; init; }
+}
+
+public sealed record RvaVerificationOptions
+{
+ public bool CheckExpiration { get; init; } = true;
+ public bool AllowExpired { get; init; } = false;
+ public bool VerifySnapshotExists { get; init; } = false;
+ public bool VerifySignerIdentity { get; init; } = true;
+ public bool ContinueOnSignatureFailure { get; init; } = false;
+
+ public static RvaVerificationOptions Default { get; } = new();
+ public static RvaVerificationOptions Strict { get; } = new()
+ {
+ VerifySnapshotExists = true,
+ AllowExpired = false
+ };
+}
+
+public interface IRvaVerifier
+{
+ Task<RvaVerificationResult> VerifyAsync(DsseEnvelope envelope, RvaVerificationOptions options, CancellationToken ct = default);
+ Task<SignatureVerificationResult> VerifySignatureAsync(DsseEnvelope envelope, RvaVerificationOptions options, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RvaVerifier.cs` created
+- [ ] DSSE signature verification
+- [ ] Content-addressed ID verification
+- [ ] Expiration checking with configurable behavior
+- [ ] Snapshot existence verification (optional)
+- [ ] Signer identity trust verification
+- [ ] Comprehensive verification result
+
+---
+
+### T7: Add Tests
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T6
+
+**Description**:
+Add comprehensive tests for RVA creation and verification.
+
+**Implementation Path**: `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Attestation/`
+
+**Test Cases**:
+```csharp
+public class RvaBuilderTests
+{
+ [Fact]
+ public void Build_ValidInputs_CreatesRva()
+ {
+ var rva = new RvaBuilder(_hasher)
+ .WithVerdict(RiskVerdictStatus.Pass)
+ .WithSubject("sha256:abc123", "container-image", "myapp:v1.0")
+ .WithPolicy("policy-1", "1.0", "sha256:xyz")
+ .WithKnowledgeSnapshot("ksm:sha256:def456")
+ .WithReasonCode(VerdictReasonCode.PassNoCves)
+ .Build();
+
+ rva.AttestationId.Should().StartWith("rva:sha256:");
+ rva.Verdict.Should().Be(RiskVerdictStatus.Pass);
+ rva.ReasonCodes.Should().Contain(VerdictReasonCode.PassNoCves);
+ }
+
+ [Fact]
+ public void Build_MissingSubject_Throws()
+ {
+ var builder = new RvaBuilder(_hasher)
+ .WithVerdict(RiskVerdictStatus.Pass)
+ .WithPolicy("p", "1.0", "sha256:x")
+ .WithKnowledgeSnapshot("ksm:sha256:y");
+
+ var act = () => builder.Build();
+
+ act.Should().Throw()
+ .WithMessage("*Subject*");
+ }
+
+ [Fact]
+ public void FromEvaluationResult_MapsCorrectly()
+ {
+ var result = CreateEvaluationResult(PolicyDecision.Fail, findings: new[]
+ {
+ CreateFinding("CVE-2024-001", isReachable: true)
+ });
+
+ var rva = new RvaBuilder(_hasher)
+ .FromEvaluationResult(result)
+ .WithPolicy("p", "1.0", "sha256:x")
+ .Build();
+
+ rva.Verdict.Should().Be(RiskVerdictStatus.Fail);
+ rva.ReasonCodes.Should().Contain(VerdictReasonCode.FailCveReachable);
+ }
+
+ [Fact]
+ public void Build_ContentAddressedId_IsDeterministic()
+ {
+ var builder1 = CreateBuilder();
+ var builder2 = CreateBuilder();
+
+ var rva1 = builder1.Build();
+ var rva2 = builder2.Build();
+
+ rva1.AttestationId.Should().Be(rva2.AttestationId);
+ }
+}
+
+public class RvaVerifierTests
+{
+ [Fact]
+ public async Task Verify_ValidSignature_ReturnsSuccess()
+ {
+ var rva = CreateRva();
+ var envelope = await SignRvaAsync(rva);
+
+ var result = await _verifier.VerifyAsync(envelope, RvaVerificationOptions.Default);
+
+ result.IsValid.Should().BeTrue();
+ result.Attestation.Should().NotBeNull();
+ }
+
+ [Fact]
+ public async Task Verify_TamperedPayload_ReturnsFailure()
+ {
+ var rva = CreateRva();
+ var envelope = await SignRvaAsync(rva);
+ var tampered = TamperWithPayload(envelope);
+
+ var result = await _verifier.VerifyAsync(tampered, RvaVerificationOptions.Default);
+
+ result.IsValid.Should().BeFalse();
+ result.Issues.Should().Contain(i => i.Contains("Signature"));
+ }
+
+ [Fact]
+ public async Task Verify_ExpiredRva_FailsByDefault()
+ {
+ var rva = CreateRva(expiresAt: DateTimeOffset.UtcNow.AddDays(-1));
+ var envelope = await SignRvaAsync(rva);
+
+ var result = await _verifier.VerifyAsync(envelope, RvaVerificationOptions.Default);
+
+ result.IsValid.Should().BeFalse();
+ result.Issues.Should().Contain(i => i.Contains("expired"));
+ }
+
+ [Fact]
+ public async Task Verify_ExpiredRva_AllowedWithOption()
+ {
+ var rva = CreateRva(expiresAt: DateTimeOffset.UtcNow.AddDays(-1));
+ var envelope = await SignRvaAsync(rva);
+ var options = new RvaVerificationOptions { AllowExpired = true };
+
+ var result = await _verifier.VerifyAsync(envelope, options);
+
+ result.IsValid.Should().BeTrue();
+ }
+
+ [Fact]
+ public async Task Verify_InvalidAttestationId_Fails()
+ {
+ var rva = CreateRva() with { AttestationId = "rva:sha256:tampered" };
+ var envelope = await SignRvaAsync(rva);
+
+ var result = await _verifier.VerifyAsync(envelope, RvaVerificationOptions.Default);
+
+ result.IsValid.Should().BeFalse();
+ result.Issues.Should().Contain(i => i.Contains("ID"));
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Builder tests for valid/invalid inputs
+- [ ] Builder determinism test
+- [ ] FromEvaluationResult mapping test
+- [ ] Verifier signature verification test
+- [ ] Verifier tamper detection test
+- [ ] Verifier expiration tests
+- [ ] All 6+ tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | Policy Team | Define RiskVerdictAttestation model |
+| 2 | T2 | TODO | — | Policy Team | Define VerdictReasonCode enum |
+| 3 | T3 | TODO | T1, T2 | Policy Team | Create RvaBuilder |
+| 4 | T4 | TODO | T3 | Policy Team | Integrate knowledge snapshot reference |
+| 5 | T5 | TODO | T1 | Policy Team | Update predicate type |
+| 6 | T6 | TODO | T1, T5 | Policy Team | Create RvaVerifier |
+| 7 | T7 | TODO | T6 | Policy Team | Add tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. RVA contract identified as requirement from Moat #2 advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Four verdict statuses | Decision | Policy Team | PASS/FAIL/PASS_WITH_EXCEPTIONS/INDETERMINATE covers all cases |
+| Content-addressed ID | Decision | Policy Team | rva:sha256:{hash} ensures immutability |
+| In-toto predicate type | Decision | Policy Team | stella.ops/predicates/risk-verdict@v1 |
+| Expiration support | Decision | Policy Team | Optional but recommended for time-sensitive verdicts |
+
+---
+
+## Success Criteria
+
+- [ ] All 7 tasks marked DONE
+- [ ] RVA model supports all verdict types
+- [ ] Builder creates valid attestations
+- [ ] Verifier catches tampering
+- [ ] Predicate type follows in-toto spec
+- [ ] 6+ tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0003_0002_oci_referrer_push.md b/docs/implplan/SPRINT_4100_0003_0002_oci_referrer_push.md
new file mode 100644
index 000000000..d577a9fe3
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0003_0002_oci_referrer_push.md
@@ -0,0 +1,1344 @@
+# Sprint 4100.0003.0002 · OCI Referrer Push & Discovery
+
+## Topic & Scope
+
+- Implement OCI artifact push with subject binding (referrers API)
+- Enable RVA attachment to container images
+- Support discovery of attestations by image digest
+
+**Working directory:** `src/ExportCenter/StellaOps.ExportCenter.WebService/Distribution/Oci/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0003.0001 (Risk Verdict Attestation Contract) — MUST BE DONE
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4100.0001.0002, Sprint 4100.0002.0002
+
+## Documentation Prerequisites
+
+- Sprint 4100.0003.0001 completion (RiskVerdictAttestation)
+- `src/ExportCenter/StellaOps.ExportCenter.WebService/AGENTS.md`
+- `docs/product-advisories/19-Dec-2025 - Moat #2.md` (Risk Verdict Attestation)
+- OCI Distribution Spec: Referrers API
+
+---
+
+## Tasks
+
+### T1: Implement OCI Push Client
+
+**Assignee**: ExportCenter Team
+**Story Points**: 4
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Implement OCI registry push client with subject binding support.
+
+**Implementation Path**: `Oci/OciPushClient.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// Client for pushing artifacts to OCI registries with referrer support.
+///
+public sealed class OciPushClient : IOciPushClient
+{
+ private readonly HttpClient _httpClient;
+ private readonly IOciAuthProvider _authProvider;
+ private readonly ILogger<OciPushClient> _logger;
+
+ public OciPushClient(
+ HttpClient httpClient,
+ IOciAuthProvider authProvider,
+ ILogger<OciPushClient> logger)
+ {
+ _httpClient = httpClient;
+ _authProvider = authProvider;
+ _logger = logger;
+ }
+
+ /// <summary>
+ /// Pushes an artifact to the registry with subject binding.
+ /// </summary>
+ public async Task<OciPushResult> PushArtifactAsync(
+ OciPushRequest request,
+ CancellationToken ct = default)
+ {
+ _logger.LogInformation("Pushing artifact to {Registry}/{Repository}",
+ request.Registry, request.Repository);
+
+ try
+ {
+ // Authenticate
+ var token = await _authProvider.GetTokenAsync(request.Registry, request.Repository, ct);
+
+ // Step 1: Push config blob (empty for attestations)
+ var configDigest = await PushBlobAsync(
+ request.Registry, request.Repository,
+ request.Config, token, ct);
+
+ // Step 2: Push artifact content as blob
+ var contentDigest = await PushBlobAsync(
+ request.Registry, request.Repository,
+ request.Content, token, ct);
+
+ // Step 3: Create and push manifest with subject
+ var manifest = CreateManifest(request, configDigest, contentDigest);
+ var manifestDigest = await PushManifestAsync(
+ request.Registry, request.Repository,
+ manifest, token, ct);
+
+ _logger.LogInformation("Pushed artifact {Digest} to {Registry}/{Repository}",
+ manifestDigest, request.Registry, request.Repository);
+
+ return new OciPushResult
+ {
+ IsSuccess = true,
+ Digest = manifestDigest,
+ Registry = request.Registry,
+ Repository = request.Repository
+ };
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to push artifact to {Registry}/{Repository}",
+ request.Registry, request.Repository);
+
+ return new OciPushResult
+ {
+ IsSuccess = false,
+ Error = ex.Message
+ };
+ }
+ }
+
+ private async Task<string> PushBlobAsync(
+ string registry, string repository,
+ byte[] content, string token, CancellationToken ct)
+ {
+ var digest = ComputeDigest(content);
+
+ // Check if blob exists
+ var checkUrl = $"https://{registry}/v2/{repository}/blobs/{digest}";
+ var checkRequest = new HttpRequestMessage(HttpMethod.Head, checkUrl);
+ checkRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+
+ var checkResponse = await _httpClient.SendAsync(checkRequest, ct);
+ if (checkResponse.IsSuccessStatusCode)
+ {
+ _logger.LogDebug("Blob {Digest} already exists", digest);
+ return digest;
+ }
+
+ // Start upload session
+ var uploadUrl = $"https://{registry}/v2/{repository}/blobs/uploads/";
+ var uploadRequest = new HttpRequestMessage(HttpMethod.Post, uploadUrl);
+ uploadRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+
+ var uploadResponse = await _httpClient.SendAsync(uploadRequest, ct);
+ uploadResponse.EnsureSuccessStatusCode();
+
+ var location = uploadResponse.Headers.Location?.ToString()
+ ?? throw new InvalidOperationException("No upload location returned");
+
+ // Complete upload
+ var completeUrl = location.Contains('?')
+ ? $"{location}&digest={digest}"
+ : $"{location}?digest={digest}";
+
+ var completeRequest = new HttpRequestMessage(HttpMethod.Put, completeUrl);
+ completeRequest.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+ completeRequest.Content = new ByteArrayContent(content);
+ completeRequest.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
+
+ var completeResponse = await _httpClient.SendAsync(completeRequest, ct);
+ completeResponse.EnsureSuccessStatusCode();
+
+ return digest;
+ }
+
+ private OciManifest CreateManifest(OciPushRequest request, string configDigest, string contentDigest)
+ {
+ var manifest = new OciManifest
+ {
+ SchemaVersion = 2,
+ MediaType = OciMediaTypes.ImageManifest,
+ Config = new OciDescriptor
+ {
+ MediaType = request.ConfigMediaType ?? OciMediaTypes.EmptyConfig,
+ Digest = configDigest,
+ Size = request.Config.Length
+ },
+ Layers = new[]
+ {
+ new OciDescriptor
+ {
+ MediaType = request.ContentMediaType,
+ Digest = contentDigest,
+ Size = request.Content.Length,
+ Annotations = request.Annotations
+ }
+ },
+ Annotations = request.ManifestAnnotations
+ };
+
+ // Add subject for referrer binding
+ if (request.SubjectDigest is not null)
+ {
+ manifest.Subject = new OciDescriptor
+ {
+ MediaType = OciMediaTypes.ImageManifest,
+ Digest = request.SubjectDigest
+ };
+ }
+
+ return manifest;
+ }
+
+ private async Task<string> PushManifestAsync(
+ string registry, string repository,
+ OciManifest manifest, string token, CancellationToken ct)
+ {
+ var json = JsonSerializer.Serialize(manifest, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+ DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
+ });
+
+ var digest = ComputeDigest(Encoding.UTF8.GetBytes(json));
+
+ var url = $"https://{registry}/v2/{repository}/manifests/{digest}";
+ var request = new HttpRequestMessage(HttpMethod.Put, url);
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+ request.Content = new StringContent(json, Encoding.UTF8, OciMediaTypes.ImageManifest);
+
+ var response = await _httpClient.SendAsync(request, ct);
+ response.EnsureSuccessStatusCode();
+
+ return digest;
+ }
+
+ private static string ComputeDigest(byte[] content)
+ {
+ var hash = SHA256.HashData(content);
+ return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
+ }
+}
+
+public sealed record OciPushRequest
+{
+ public required string Registry { get; init; }
+ public required string Repository { get; init; }
+ public required byte[] Content { get; init; }
+ public required string ContentMediaType { get; init; }
+ public byte[] Config { get; init; } = [];
+ public string? ConfigMediaType { get; init; }
+ public string? SubjectDigest { get; init; }
+ public IReadOnlyDictionary<string, string>? Annotations { get; init; }
+ public IReadOnlyDictionary<string, string>? ManifestAnnotations { get; init; }
+}
+
+public sealed record OciPushResult
+{
+ public required bool IsSuccess { get; init; }
+ public string? Digest { get; init; }
+ public string? Registry { get; init; }
+ public string? Repository { get; init; }
+ public string? Error { get; init; }
+}
+
+public interface IOciPushClient
+{
+ Task<OciPushResult> PushArtifactAsync(OciPushRequest request, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `OciPushClient.cs` created
+- [ ] Blob upload with digest check
+- [ ] Manifest creation with subject binding
+- [ ] Bearer token authentication
+- [ ] Error handling and logging
+- [ ] Interface for DI
+
+---
+
+### T2: Add Referrer Discovery
+
+**Assignee**: ExportCenter Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement discovery of artifacts via OCI referrers API.
+
+**Implementation Path**: `Oci/OciReferrerDiscovery.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// Discovers artifacts attached to images via the OCI referrers API.
+///
+public sealed class OciReferrerDiscovery : IOciReferrerDiscovery
+{
+ private readonly HttpClient _httpClient;
+ private readonly IOciAuthProvider _authProvider;
+ private readonly ILogger _logger;
+
+ /// <summary>
+ /// Lists all referrers for a given image digest.
+ /// </summary>
+ public async Task<ReferrerListResult> ListReferrersAsync(
+ string registry, string repository, string digest,
+ ReferrerFilterOptions? filter = null,
+ CancellationToken ct = default)
+ {
+ _logger.LogDebug("Listing referrers for {Registry}/{Repository}@{Digest}",
+ registry, repository, digest);
+
+ try
+ {
+ var token = await _authProvider.GetTokenAsync(registry, repository, ct);
+
+ // Try referrers API first (OCI 1.1+)
+ var result = await TryReferrersApiAsync(registry, repository, digest, token, filter, ct);
+ if (result is not null)
+ return result;
+
+ // Fall back to tag-based discovery
+ return await FallbackTagDiscoveryAsync(registry, repository, digest, token, filter, ct);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to list referrers for {Digest}", digest);
+ return new ReferrerListResult
+ {
+ IsSuccess = false,
+ Error = ex.Message
+ };
+ }
+ }
+
+ private async Task<ReferrerListResult?> TryReferrersApiAsync(
+ string registry, string repository, string digest, string token,
+ ReferrerFilterOptions? filter, CancellationToken ct)
+ {
+ var url = $"https://{registry}/v2/{repository}/referrers/{digest}";
+ if (filter?.ArtifactType is not null)
+ {
+ url += $"?artifactType={Uri.EscapeDataString(filter.ArtifactType)}";
+ }
+
+ var request = new HttpRequestMessage(HttpMethod.Get, url);
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+ request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(OciMediaTypes.ImageIndex));
+
+ var response = await _httpClient.SendAsync(request, ct);
+
+ if (response.StatusCode == HttpStatusCode.NotFound)
+ {
+ // Registry doesn't support referrers API
+ return null;
+ }
+
+ response.EnsureSuccessStatusCode();
+
+ var json = await response.Content.ReadAsStringAsync(ct);
+ var index = JsonSerializer.Deserialize<OciIndex>(json, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase
+ });
+
+ return new ReferrerListResult
+ {
+ IsSuccess = true,
+ Referrers = index?.Manifests?.Select(m => new ReferrerInfo
+ {
+ Digest = m.Digest,
+ ArtifactType = m.ArtifactType,
+ MediaType = m.MediaType,
+ Size = m.Size,
+ Annotations = m.Annotations ?? new Dictionary<string, string>()
+ }).ToList() ?? [],
+ SupportsReferrersApi = true
+ };
+ }
+
+ private async Task<ReferrerListResult> FallbackTagDiscoveryAsync(
+ string registry, string repository, string digest, string token,
+ ReferrerFilterOptions? filter, CancellationToken ct)
+ {
+ // Fallback: Check for tagged index at sha256-{hash}
+ var hashPart = digest.Replace("sha256:", "");
+ var tagPrefix = $"sha256-{hashPart}";
+
+ var url = $"https://{registry}/v2/{repository}/tags/list";
+ var request = new HttpRequestMessage(HttpMethod.Get, url);
+ request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token);
+
+ var response = await _httpClient.SendAsync(request, ct);
+ response.EnsureSuccessStatusCode();
+
+ var json = await response.Content.ReadAsStringAsync(ct);
+ var tagList = JsonSerializer.Deserialize<OciTagList>(json);
+
+ var matchingTags = tagList?.Tags?
+ .Where(t => t.StartsWith(tagPrefix))
+ .ToList() ?? [];
+
+ var referrers = new List<ReferrerInfo>();
+ foreach (var tag in matchingTags)
+ {
+ var manifest = await GetManifestAsync(registry, repository, tag, token, ct);
+ if (manifest is not null)
+ {
+ referrers.Add(new ReferrerInfo
+ {
+ Digest = ComputeManifestDigest(manifest),
+ ArtifactType = manifest.ArtifactType,
+ MediaType = manifest.MediaType,
+ Annotations = manifest.Annotations ?? new Dictionary<string, string>()
+ });
+ }
+ }
+
+ if (filter?.ArtifactType is not null)
+ {
+ referrers = referrers.Where(r => r.ArtifactType == filter.ArtifactType).ToList();
+ }
+
+ return new ReferrerListResult
+ {
+ IsSuccess = true,
+ Referrers = referrers,
+ SupportsReferrersApi = false
+ };
+ }
+
+ /// <summary>
+ /// Finds RVA attestations for an image.
+ /// </summary>
+ public async Task<IReadOnlyList<ReferrerInfo>> FindRvaAttestationsAsync(
+ string registry, string repository, string imageDigest,
+ CancellationToken ct = default)
+ {
+ var result = await ListReferrersAsync(registry, repository, imageDigest,
+ new ReferrerFilterOptions { ArtifactType = OciArtifactTypes.RvaJson },
+ ct);
+
+ return result.IsSuccess ? result.Referrers : [];
+ }
+}
+
+public sealed record ReferrerListResult
+{
+ public required bool IsSuccess { get; init; }
+ public IReadOnlyList<ReferrerInfo> Referrers { get; init; } = [];
+ public bool SupportsReferrersApi { get; init; }
+ public string? Error { get; init; }
+}
+
+public sealed record ReferrerInfo
+{
+ public required string Digest { get; init; }
+ public string? ArtifactType { get; init; }
+ public string? MediaType { get; init; }
+ public long Size { get; init; }
+ public IReadOnlyDictionary<string, string> Annotations { get; init; }
+ = new Dictionary<string, string>();
+}
+
+public sealed record ReferrerFilterOptions
+{
+ public string? ArtifactType { get; init; }
+}
+
+public interface IOciReferrerDiscovery
+{
+ Task<ReferrerListResult> ListReferrersAsync(
+ string registry, string repository, string digest,
+ ReferrerFilterOptions? filter = null,
+ CancellationToken ct = default);
+
+ Task<IReadOnlyList<ReferrerInfo>> FindRvaAttestationsAsync(
+ string registry, string repository, string imageDigest,
+ CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `OciReferrerDiscovery.cs` created
+- [ ] Referrers API (OCI 1.1+) supported
+- [ ] Fallback to tag-based discovery for older registries
+- [ ] Artifact type filtering
+- [ ] `FindRvaAttestationsAsync` convenience method
+- [ ] Interface for DI
+
+---
+
+### T3: Implement Fallback Strategy
+
+**Assignee**: ExportCenter Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Implement fallback to tagged index for registries without referrers API.
+
+**Implementation Path**: `Oci/OciReferrerFallback.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// Fallback strategies for registries without native referrers API.
+///
+public sealed class OciReferrerFallback : IOciReferrerFallback
+{
+ private readonly IOciPushClient _pushClient;
+ private readonly ILogger _logger;
+
+ ///
+ /// Pushes an artifact with fallback tag for older registries.
+ ///
+ public async Task PushWithFallbackAsync(
+ OciPushRequest request,
+ FallbackOptions options,
+ CancellationToken ct = default)
+ {
+ // First, try native push with subject
+ var result = await _pushClient.PushArtifactAsync(request, ct);
+
+ if (!result.IsSuccess)
+ {
+ _logger.LogWarning("Native push failed: {Error}", result.Error);
+ return result;
+ }
+
+ // If subject was specified, also create fallback tag
+ if (request.SubjectDigest is not null && options.CreateFallbackTag)
+ {
+ await CreateFallbackTagAsync(
+ request.Registry, request.Repository,
+ request.SubjectDigest, result.Digest!,
+ ct);
+ }
+
+ return result;
+ }
+
+ private async Task CreateFallbackTagAsync(
+ string registry, string repository,
+ string subjectDigest, string referrerDigest,
+ CancellationToken ct)
+ {
+ // Create tag in format: sha256-{subject-hash}.{artifact-type}
+ var subjectHash = subjectDigest.Replace("sha256:", "");
+ var tag = $"sha256-{subjectHash}.rva";
+
+ _logger.LogDebug("Creating fallback tag {Tag} for referrer {Digest}",
+ tag, referrerDigest);
+
+ // Create index manifest pointing to the referrer
+ var index = new OciIndex
+ {
+ SchemaVersion = 2,
+ MediaType = OciMediaTypes.ImageIndex,
+ Manifests = new[]
+ {
+ new OciDescriptor
+ {
+ MediaType = OciMediaTypes.ImageManifest,
+ Digest = referrerDigest,
+ ArtifactType = OciArtifactTypes.RvaJson
+ }
+ }
+ };
+
+ // Push the index with the fallback tag
+ // ... implementation details ...
+ }
+
+ /// <summary>
+ /// Determines the best push strategy for a registry.
+ /// </summary>
+ public async Task<RegistryCapabilities> ProbeCapabilitiesAsync(
+ string registry,
+ CancellationToken ct = default)
+ {
+ var capabilities = new RegistryCapabilities
+ {
+ Registry = registry
+ };
+
+ try
+ {
+ // Check OCI Distribution version
+ var response = await _httpClient.GetAsync($"https://{registry}/v2/", ct);
+ var version = response.Headers.TryGetValues("OCI-Distribution-Version", out var values)
+ ? values.FirstOrDefault()
+ : null;
+
+ capabilities.DistributionVersion = version;
+ capabilities.SupportsReferrersApi = version?.StartsWith("1.1") == true;
+
+ // Check if registry accepts artifact types
+ capabilities.SupportsArtifactType = await ProbeArtifactTypeAsync(registry, ct);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Failed to probe capabilities for {Registry}", registry);
+ }
+
+ return capabilities;
+ }
+
+ private async Task<bool> ProbeArtifactTypeAsync(string registry, CancellationToken ct)
+ {
+ // Implementation: Try to push a test manifest with artifactType
+ // and see if it's accepted
+ return true; // Simplified
+ }
+}
+
+public sealed record FallbackOptions
+{
+ ///
+ /// Create a tagged index for registries without referrers API.
+ ///
+ public bool CreateFallbackTag { get; init; } = true;
+
+ ///
+ /// Tag format template. {subject} and {type} are replaced.
+ ///
+ public string TagTemplate { get; init; } = "sha256-{subject}.{type}";
+}
+
+public sealed record RegistryCapabilities
+{
+ public required string Registry { get; init; }
+ public string? DistributionVersion { get; set; }
+ public bool SupportsReferrersApi { get; set; }
+ public bool SupportsArtifactType { get; set; }
+}
+
+public interface IOciReferrerFallback
+{
+ Task<OciPushResult> PushWithFallbackAsync(OciPushRequest request, FallbackOptions options, CancellationToken ct = default);
+ Task<RegistryCapabilities> ProbeCapabilitiesAsync(string registry, CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `OciReferrerFallback.cs` created
+- [ ] Fallback tag creation for older registries
+- [ ] Registry capability probing
+- [ ] Configurable tag template
+- [ ] Logging for strategy selection
+
+---
+
+### T4: Register Artifact Types
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Define and register StellaOps artifact type constants.
+
+**Implementation Path**: `Oci/OciArtifactTypes.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// OCI artifact types for StellaOps attestations.
+///
+public static class OciArtifactTypes
+{
+ ///
+ /// Risk Verdict Attestation (JSON).
+ ///
+ public const string RvaJson = "application/vnd.stellaops.rva+json";
+
+ ///
+ /// Risk Verdict Attestation (DSSE envelope).
+ ///
+ public const string RvaDsse = "application/vnd.stellaops.rva.dsse+json";
+
+ ///
+ /// SBOM (CycloneDX JSON).
+ ///
+ public const string SbomCyclonedx = "application/vnd.cyclonedx+json";
+
+ ///
+ /// SBOM (SPDX JSON).
+ ///
+ public const string SbomSpdx = "application/spdx+json";
+
+ ///
+ /// VEX document (OpenVEX).
+ ///
+ public const string VexOpenvex = "application/vnd.openvex+json";
+
+ ///
+ /// Knowledge snapshot manifest.
+ ///
+ public const string KnowledgeSnapshot = "application/vnd.stellaops.knowledge-snapshot+json";
+
+ ///
+ /// Policy bundle.
+ ///
+ public const string PolicyBundle = "application/vnd.stellaops.policy+json";
+
+ ///
+ /// In-toto statement (generic).
+ ///
+ public const string InTotoStatement = "application/vnd.in-toto+json";
+
+ ///
+ /// Gets the artifact type for an RVA based on format.
+ ///
+ public static string GetRvaType(bool isSigned) =>
+ isSigned ? RvaDsse : RvaJson;
+}
+
+///
+/// Standard OCI media types.
+///
+public static class OciMediaTypes
+{
+ public const string ImageManifest = "application/vnd.oci.image.manifest.v1+json";
+ public const string ImageIndex = "application/vnd.oci.image.index.v1+json";
+ public const string ImageConfig = "application/vnd.oci.image.config.v1+json";
+ public const string EmptyConfig = "application/vnd.oci.empty.v1+json";
+ public const string ImageLayer = "application/vnd.oci.image.layer.v1.tar+gzip";
+}
+
+///
+/// Standard OCI annotation keys.
+///
+public static class OciAnnotations
+{
+ public const string CreatedAt = "org.opencontainers.image.created";
+ public const string Authors = "org.opencontainers.image.authors";
+ public const string Description = "org.opencontainers.image.description";
+ public const string Title = "org.opencontainers.image.title";
+
+ // StellaOps custom annotations
+ public const string RvaId = "ops.stella.rva.id";
+ public const string RvaVerdict = "ops.stella.rva.verdict";
+ public const string RvaPolicy = "ops.stella.rva.policy";
+ public const string RvaSnapshot = "ops.stella.rva.snapshot";
+ public const string RvaExpires = "ops.stella.rva.expires";
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `OciArtifactTypes.cs` created
+- [ ] RVA types: JSON and DSSE
+- [ ] SBOM types: CycloneDX, SPDX
+- [ ] VEX type: OpenVEX
+- [ ] Standard OCI media types
+- [ ] StellaOps custom annotations
+
+---
+
+### T5: Add Registry Config
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Add configuration for registry authentication and TLS.
+
+**Implementation Path**: `Oci/OciRegistryConfig.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// Configuration for OCI registry connections.
+///
+public sealed class OciRegistryConfig
+{
+ ///
+ /// Default registry (e.g., docker.io, ghcr.io).
+ ///
+ public string? DefaultRegistry { get; set; }
+
+ ///
+ /// Registry-specific configurations keyed by hostname.
+ ///
+ public Dictionary Registries { get; set; } = new();
+
+ ///
+ /// Global settings applied to all registries.
+ ///
+ public RegistryGlobalSettings Global { get; set; } = new();
+}
+
+public sealed class RegistryEndpointConfig
+{
+ ///
+ /// Registry hostname (e.g., "gcr.io", "registry.example.com").
+ ///
+ public required string Host { get; set; }
+
+ ///
+ /// Authentication method.
+ ///
+ public RegistryAuthMethod AuthMethod { get; set; } = RegistryAuthMethod.Anonymous;
+
+ ///
+ /// Username for basic auth.
+ ///
+ public string? Username { get; set; }
+
+ ///
+ /// Password or token for basic auth.
+ ///
+ public string? Password { get; set; }
+
+ ///
+ /// Path to credentials file (e.g., Docker config.json).
+ ///
+ public string? CredentialsFile { get; set; }
+
+ ///
+ /// OAuth2/OIDC token endpoint.
+ ///
+ public string? TokenEndpoint { get; set; }
+
+ ///
+ /// TLS configuration.
+ ///
+ public RegistryTlsConfig? Tls { get; set; }
+
+ ///
+ /// Use HTTP instead of HTTPS (insecure, for local dev only).
+ ///
+ public bool Insecure { get; set; }
+}
+
+public sealed class RegistryTlsConfig
+{
+ ///
+ /// Path to CA certificate bundle.
+ ///
+ public string? CaCertPath { get; set; }
+
+ ///
+ /// Path to client certificate (for mTLS).
+ ///
+ public string? ClientCertPath { get; set; }
+
+ ///
+ /// Path to client key (for mTLS).
+ ///
+ public string? ClientKeyPath { get; set; }
+
+ ///
+ /// Skip certificate verification (insecure).
+ ///
+ public bool SkipVerify { get; set; }
+}
+
+public sealed class RegistryGlobalSettings
+{
+ ///
+ /// Timeout for registry operations.
+ ///
+ public TimeSpan Timeout { get; set; } = TimeSpan.FromMinutes(5);
+
+ ///
+ /// Retry count for failed operations.
+ ///
+ public int RetryCount { get; set; } = 3;
+
+ ///
+ /// User agent string.
+ ///
+ public string UserAgent { get; set; } = "StellaOps/1.0";
+
+ ///
+ /// Enable referrers API fallback.
+ ///
+ public bool EnableReferrersFallback { get; set; } = true;
+}
+
+public enum RegistryAuthMethod
+{
+ Anonymous,
+ Basic,
+ Bearer,
+ DockerConfig,
+ Oidc,
+ AwsEcr,
+ GcpGcr,
+ AzureAcr
+}
+
+/// <summary>
+/// Factory for creating configured HTTP clients.
+/// </summary>
+public sealed class OciHttpClientFactory
+{
+ private readonly OciRegistryConfig _config;
+
+ public OciHttpClientFactory(OciRegistryConfig config)
+ {
+ _config = config;
+ }
+
+ public HttpClient CreateClient(string registry)
+ {
+ var endpointConfig = GetEndpointConfig(registry);
+ var handler = CreateHandler(endpointConfig);
+
+ var client = new HttpClient(handler)
+ {
+ Timeout = _config.Global.Timeout
+ };
+
+ client.DefaultRequestHeaders.UserAgent.ParseAdd(_config.Global.UserAgent);
+
+ return client;
+ }
+
+ private RegistryEndpointConfig GetEndpointConfig(string registry)
+ {
+ if (_config.Registries.TryGetValue(registry, out var config))
+ return config;
+
+ // Return default config
+ return new RegistryEndpointConfig { Host = registry };
+ }
+
+ private HttpClientHandler CreateHandler(RegistryEndpointConfig config)
+ {
+ var handler = new HttpClientHandler();
+
+ if (config.Tls?.SkipVerify == true)
+ {
+ handler.ServerCertificateCustomValidationCallback =
+ HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
+ }
+
+ if (config.Tls?.CaCertPath is not null)
+ {
+ // Load custom CA certificate
+ // ... implementation ...
+ }
+
+ return handler;
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `OciRegistryConfig.cs` created
+- [ ] Per-registry configuration
+- [ ] Multiple auth methods supported
+- [ ] TLS/mTLS configuration
+- [ ] Global timeout and retry settings
+- [ ] HTTP client factory
+
+---
+
+### T6: Integrate with RVA Flow
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T1, T4
+
+**Description**:
+Auto-push RVA to registry on verdict creation.
+
+**Implementation Path**: `Oci/RvaOciPublisher.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
+
+///
+/// Publishes Risk Verdict Attestations to OCI registries.
+///
+public sealed class RvaOciPublisher : IRvaOciPublisher
+{
+ private readonly IOciPushClient _pushClient;
+ private readonly IOciReferrerFallback _fallback;
+ private readonly ISigner _signer;
+ private readonly ILogger _logger;
+
+ public RvaOciPublisher(
+ IOciPushClient pushClient,
+ IOciReferrerFallback fallback,
+ ISigner signer,
+ ILogger logger)
+ {
+ _pushClient = pushClient;
+ _fallback = fallback;
+ _signer = signer;
+ _logger = logger;
+ }
+
+ /// <summary>
+ /// Publishes an RVA as an OCI artifact attached to the subject image.
+ /// </summary>
+ public async Task<RvaPublishResult> PublishAsync(
+ RiskVerdictAttestation attestation,
+ RvaPublishOptions options,
+ CancellationToken ct = default)
+ {
+ _logger.LogInformation(
+ "Publishing RVA {AttestationId} to {Registry}/{Repository}",
+ attestation.AttestationId, options.Registry, options.Repository);
+
+ try
+ {
+ // Sign the attestation
+ var statement = RvaPredicate.CreateStatement(attestation);
+ var envelope = await SignStatementAsync(statement, ct);
+
+ // Prepare push request
+ var request = new OciPushRequest
+ {
+ Registry = options.Registry,
+ Repository = options.Repository,
+ Content = Encoding.UTF8.GetBytes(envelope),
+ ContentMediaType = OciArtifactTypes.RvaDsse,
+ SubjectDigest = attestation.Subject.Digest,
+ Annotations = CreateAnnotations(attestation),
+ ManifestAnnotations = new Dictionary<string, string>
+ {
+ [OciAnnotations.CreatedAt] = attestation.CreatedAt.ToString("o"),
+ [OciAnnotations.Title] = $"RVA for {attestation.Subject.Name}"
+ }
+ };
+
+ // Push with fallback support
+ var result = await _fallback.PushWithFallbackAsync(request,
+ new FallbackOptions { CreateFallbackTag = options.CreateFallbackTag },
+ ct);
+
+ if (!result.IsSuccess)
+ {
+ return new RvaPublishResult
+ {
+ IsSuccess = false,
+ Error = result.Error
+ };
+ }
+
+ _logger.LogInformation(
+ "Published RVA {AttestationId} as {Digest}",
+ attestation.AttestationId, result.Digest);
+
+ return new RvaPublishResult
+ {
+ IsSuccess = true,
+ AttestationId = attestation.AttestationId,
+ ArtifactDigest = result.Digest,
+ Registry = options.Registry,
+ Repository = options.Repository,
+ ReferrerUri = $"{options.Registry}/{options.Repository}@{result.Digest}"
+ };
+ }
+ catch (Exception ex)
+ {
+ _logger.LogError(ex, "Failed to publish RVA {AttestationId}",
+ attestation.AttestationId);
+
+ return new RvaPublishResult
+ {
+ IsSuccess = false,
+ Error = ex.Message
+ };
+ }
+ }
+
+ private async Task<string> SignStatementAsync(InTotoStatement statement, CancellationToken ct)
+ {
+ var payload = JsonSerializer.Serialize(statement, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase
+ });
+
+ var payloadBytes = Encoding.UTF8.GetBytes(payload);
+ var signature = await _signer.SignAsync(payloadBytes, ct);
+
+ var envelope = new DsseEnvelope
+ {
+ PayloadType = "application/vnd.in-toto+json",
+ Payload = Convert.ToBase64String(payloadBytes),
+ Signatures = new[]
+ {
+ new DsseSignature
+ {
+ KeyId = _signer.KeyId,
+ Sig = Convert.ToBase64String(signature)
+ }
+ }
+ };
+
+ return JsonSerializer.Serialize(envelope);
+ }
+
+ private static IReadOnlyDictionary<string, string> CreateAnnotations(
+ RiskVerdictAttestation attestation)
+ {
+ var annotations = new Dictionary<string, string>
+ {
+ [OciAnnotations.RvaId] = attestation.AttestationId,
+ [OciAnnotations.RvaVerdict] = attestation.Verdict.ToString(),
+ [OciAnnotations.RvaPolicy] = attestation.Policy.PolicyId,
+ [OciAnnotations.RvaSnapshot] = attestation.KnowledgeSnapshotId
+ };
+
+ if (attestation.ExpiresAt.HasValue)
+ {
+ annotations[OciAnnotations.RvaExpires] = attestation.ExpiresAt.Value.ToString("o");
+ }
+
+ return annotations;
+ }
+}
+
+public sealed record RvaPublishOptions
+{
+ public required string Registry { get; init; }
+ public required string Repository { get; init; }
+ public bool CreateFallbackTag { get; init; } = true;
+ public bool SignAttestation { get; init; } = true;
+}
+
+public sealed record RvaPublishResult
+{
+ public required bool IsSuccess { get; init; }
+ public string? AttestationId { get; init; }
+ public string? ArtifactDigest { get; init; }
+ public string? Registry { get; init; }
+ public string? Repository { get; init; }
+ public string? ReferrerUri { get; init; }
+ public string? Error { get; init; }
+}
+
+public interface IRvaOciPublisher
+{
+ Task<RvaPublishResult> PublishAsync(
+ RiskVerdictAttestation attestation,
+ RvaPublishOptions options,
+ CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `RvaOciPublisher.cs` created
+- [ ] RVA signing to DSSE envelope
+- [ ] Push with subject binding
+- [ ] Custom annotations on artifacts
+- [ ] Fallback tag support
+- [ ] Complete publish result with URI
+
+---
+
+### T7: Add Tests
+
+**Assignee**: ExportCenter Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T6
+
+**Description**:
+Add mock registry integration tests.
+
+**Implementation Path**: `src/ExportCenter/__Tests/StellaOps.ExportCenter.Tests/Distribution/Oci/`
+
+**Test Cases**:
+```csharp
+public class OciPushClientTests
+{
+ [Fact]
+ public async Task PushArtifact_ValidRequest_Succeeds()
+ {
+ // Arrange
+ var mockHandler = CreateMockHandler(HttpStatusCode.Created);
+ var client = new OciPushClient(new HttpClient(mockHandler), _mockAuth.Object, _logger);
+
+ var request = new OciPushRequest
+ {
+ Registry = "registry.example.com",
+ Repository = "myapp",
+ Content = "test content"u8.ToArray(),
+ ContentMediaType = OciArtifactTypes.RvaJson,
+ SubjectDigest = "sha256:abc123"
+ };
+
+ // Act
+ var result = await client.PushArtifactAsync(request);
+
+ // Assert
+ result.IsSuccess.Should().BeTrue();
+ result.Digest.Should().StartWith("sha256:");
+ }
+
+ [Fact]
+ public async Task PushArtifact_AuthFailure_ReturnsError()
+ {
+ var mockHandler = CreateMockHandler(HttpStatusCode.Unauthorized);
+ var client = new OciPushClient(new HttpClient(mockHandler), _mockAuth.Object, _logger);
+
+ var request = CreateRequest();
+ var result = await client.PushArtifactAsync(request);
+
+ result.IsSuccess.Should().BeFalse();
+ result.Error.Should().Contain("401");
+ }
+}
+
+public class OciReferrerDiscoveryTests
+{
+ [Fact]
+ public async Task ListReferrers_WithReferrersApi_ReturnsResults()
+ {
+ var mockHandler = CreateReferrersApiHandler(manifests: new[]
+ {
+ new OciDescriptor { Digest = "sha256:rva1", ArtifactType = OciArtifactTypes.RvaJson }
+ });
+ var discovery = new OciReferrerDiscovery(new HttpClient(mockHandler), _mockAuth.Object, _logger);
+
+ var result = await discovery.ListReferrersAsync("registry.example.com", "myapp", "sha256:image");
+
+ result.IsSuccess.Should().BeTrue();
+ result.Referrers.Should().HaveCount(1);
+ result.SupportsReferrersApi.Should().BeTrue();
+ }
+
+ [Fact]
+ public async Task ListReferrers_FallbackToTags_ReturnsResults()
+ {
+ var mockHandler = CreateFallbackHandler(tags: new[] { "sha256-image.rva" });
+ var discovery = new OciReferrerDiscovery(new HttpClient(mockHandler), _mockAuth.Object, _logger);
+
+ var result = await discovery.ListReferrersAsync("registry.example.com", "myapp", "sha256:image");
+
+ result.IsSuccess.Should().BeTrue();
+ result.SupportsReferrersApi.Should().BeFalse();
+ }
+
+ [Fact]
+ public async Task FindRvaAttestations_FiltersCorrectly()
+ {
+ var mockHandler = CreateReferrersApiHandler(manifests: new[]
+ {
+ new OciDescriptor { Digest = "sha256:rva1", ArtifactType = OciArtifactTypes.RvaJson },
+ new OciDescriptor { Digest = "sha256:sbom", ArtifactType = OciArtifactTypes.SbomCyclonedx }
+ });
+ var discovery = new OciReferrerDiscovery(new HttpClient(mockHandler), _mockAuth.Object, _logger);
+
+ var results = await discovery.FindRvaAttestationsAsync("registry.example.com", "myapp", "sha256:image");
+
+ results.Should().HaveCount(1);
+ results[0].ArtifactType.Should().Be(OciArtifactTypes.RvaJson);
+ }
+}
+
+public class RvaOciPublisherTests
+{
+ [Fact]
+ public async Task Publish_ValidRva_CreatesReferrer()
+ {
+ var rva = CreateRva();
+ var options = new RvaPublishOptions
+ {
+ Registry = "registry.example.com",
+ Repository = "myapp"
+ };
+
+ var result = await _publisher.PublishAsync(rva, options);
+
+ result.IsSuccess.Should().BeTrue();
+ result.ArtifactDigest.Should().NotBeNullOrEmpty();
+ result.ReferrerUri.Should().Contain("registry.example.com/myapp@");
+ }
+
+ [Fact]
+ public async Task Publish_SetsCorrectAnnotations()
+ {
+ var rva = CreateRva(verdict: RiskVerdictStatus.Pass);
+
+ await _publisher.PublishAsync(rva, CreateOptions());
+
+ // Verify mock received correct annotations
+ _mockPushClient.Verify(c => c.PushArtifactAsync(
+ It.Is<OciPushRequest>(r =>
+ r.Annotations![OciAnnotations.RvaVerdict] == "Pass"),
+ It.IsAny<CancellationToken>()));
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] Push client tests with mock handler
+- [ ] Auth failure handling test
+- [ ] Referrer discovery tests (API and fallback)
+- [ ] RVA filtering test
+- [ ] Publisher integration test
+- [ ] All 4+ tests pass
+
+---
+
+## Delivery Tracker
+
+| # | Task ID | Status | Dependency | Owners | Task Definition |
+|---|---------|--------|------------|--------|-----------------|
+| 1 | T1 | TODO | — | ExportCenter Team | Implement OCI push client |
+| 2 | T2 | TODO | T1 | ExportCenter Team | Add referrer discovery |
+| 3 | T3 | TODO | T1, T2 | ExportCenter Team | Implement fallback strategy |
+| 4 | T4 | TODO | — | ExportCenter Team | Register artifact types |
+| 5 | T5 | TODO | T1 | ExportCenter Team | Add registry config |
+| 6 | T6 | TODO | T1, T4 | ExportCenter Team | Integrate with RVA flow |
+| 7 | T7 | TODO | T6 | ExportCenter Team | Add tests |
+
+---
+
+## Execution Log
+
+| Date (UTC) | Update | Owner |
+|------------|--------|-------|
+| 2025-12-21 | Sprint created from MOAT Phase 2 gap analysis. OCI referrer push identified as requirement from Moat #2 advisory. | Claude |
+
+---
+
+## Decisions & Risks
+
+| Item | Type | Owner | Notes |
+|------|------|-------|-------|
+| Referrers API first | Decision | ExportCenter Team | Try OCI 1.1 referrers API, fallback to tags |
+| DSSE envelope | Decision | ExportCenter Team | Sign RVA with DSSE for in-toto compatibility |
+| Custom annotations | Decision | ExportCenter Team | ops.stella.* prefix for StellaOps annotations |
+| Fallback tag format | Decision | ExportCenter Team | sha256-{subject-hash}.rva for discovery |
+
+---
+
+## Success Criteria
+
+- [ ] All 7 tasks marked DONE
+- [ ] RVA can be pushed to OCI registries
+- [ ] Referrers API and fallback work
+- [ ] Discovery finds attached RVAs
+- [ ] Registry config supports auth methods
+- [ ] 4+ integration tests passing
+- [ ] `dotnet build` succeeds
+- [ ] `dotnet test` succeeds
diff --git a/docs/implplan/SPRINT_4100_0004_0001_security_state_delta.md b/docs/implplan/SPRINT_4100_0004_0001_security_state_delta.md
new file mode 100644
index 000000000..a660dd4fb
--- /dev/null
+++ b/docs/implplan/SPRINT_4100_0004_0001_security_state_delta.md
@@ -0,0 +1,1434 @@
+# Sprint 4100.0004.0001 · Security State Delta & Verdict
+
+## Topic & Scope
+
+- Define security state delta model comparing baseline vs target
+- Implement delta computation across SBOM, reachability, VEX, policy
+- Create signed delta verdict attestation
+
+**Working directory:** `src/Policy/__Libraries/StellaOps.Policy/Deltas/`
+
+## Dependencies & Concurrency
+
+- **Upstream**: Sprint 4100.0002.0001 (Knowledge Snapshot Manifest) — MUST BE DONE
+- **Downstream**: None
+- **Safe to parallelize with**: Sprint 4100.0001.0003, Sprint 4100.0002.0003
+
+## Documentation Prerequisites
+
+- Sprint 4100.0002.0001 completion (KnowledgeSnapshotManifest)
+- `docs/product-advisories/19-Dec-2025 - Moat #1.md` (Security Delta as Governance Unit)
+- `docs/product-advisories/20-Dec-2025 - Moat Explanation - Risk Budgets and Diff-Aware Release Gates.md`
+
+---
+
+## Tasks
+
+### T1: Define SecurityStateDelta Model
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: —
+
+**Description**:
+Create the unified delta model comparing baseline and target security states.
+
+**Implementation Path**: `Deltas/SecurityStateDelta.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Deltas;
+
+///
+/// Represents the delta between two security states (baseline vs target).
+/// This is the atomic unit of governance for release decisions.
+///
+public sealed record SecurityStateDelta
+{
+ ///
+ /// Unique identifier for this delta.
+ /// Format: delta:sha256:{hash}
+ ///
+ public required string DeltaId { get; init; }
+
+ ///
+ /// When this delta was computed.
+ ///
+ public required DateTimeOffset ComputedAt { get; init; }
+
+ ///
+ /// Knowledge snapshot ID of the baseline state.
+ ///
+ public required string BaselineSnapshotId { get; init; }
+
+ ///
+ /// Knowledge snapshot ID of the target state.
+ ///
+ public required string TargetSnapshotId { get; init; }
+
+ ///
+ /// Artifact being evaluated.
+ ///
+ public required ArtifactRef Artifact { get; init; }
+
+ ///
+ /// SBOM differences.
+ ///
+ public required SbomDelta Sbom { get; init; }
+
+ ///
+ /// Reachability differences.
+ ///
+ public required ReachabilityDelta Reachability { get; init; }
+
+ ///
+ /// VEX coverage differences.
+ ///
+ public required VexDelta Vex { get; init; }
+
+ ///
+ /// Policy evaluation differences.
+ ///
+ public required PolicyDelta Policy { get; init; }
+
+ ///
+ /// Unknowns differences.
+ ///
+ public required UnknownsDelta Unknowns { get; init; }
+
+ /// <summary>
+ /// Findings that drive the verdict.
+ /// </summary>
+ public IReadOnlyList<DeltaDriver> Drivers { get; init; } = [];
+
+ ///
+ /// Summary statistics.
+ ///
+ public required DeltaSummary Summary { get; init; }
+}
+
+///
+/// Reference to the artifact being evaluated.
+///
+public sealed record ArtifactRef(
+ string Digest,
+ string? Name,
+ string? Tag);
+
+///
+/// SBOM-level differences.
+///
+public sealed record SbomDelta
+{
+ public int PackagesAdded { get; init; }
+ public int PackagesRemoved { get; init; }
+ public int PackagesModified { get; init; }
+ public IReadOnlyList<PackageChange> AddedPackages { get; init; } = [];
+ public IReadOnlyList<PackageChange> RemovedPackages { get; init; } = [];
+ public IReadOnlyList<PackageVersionChange> VersionChanges { get; init; } = [];
+}
+
+public sealed record PackageChange(string Purl, string? License);
+public sealed record PackageVersionChange(string Purl, string OldVersion, string NewVersion);
+
+///
+/// Reachability analysis differences.
+///
+public sealed record ReachabilityDelta
+{
+ public int NewReachable { get; init; }
+ public int NewUnreachable { get; init; }
+ public int ChangedReachability { get; init; }
+ public IReadOnlyList<ReachabilityChange> Changes { get; init; } = [];
+}
+
+public sealed record ReachabilityChange(
+ string CveId,
+ string Purl,
+ bool WasReachable,
+ bool IsReachable);
+
+///
+/// VEX coverage differences.
+///
+public sealed record VexDelta
+{
+ public int NewVexStatements { get; init; }
+ public int RevokedVexStatements { get; init; }
+ public int CoverageIncrease { get; init; }
+ public int CoverageDecrease { get; init; }
+ public IReadOnlyList<VexChange> Changes { get; init; } = [];
+}
+
+public sealed record VexChange(
+ string CveId,
+ string? OldStatus,
+ string? NewStatus);
+
+///
+/// Policy evaluation differences.
+///
+public sealed record PolicyDelta
+{
+ public int NewViolations { get; init; }
+ public int ResolvedViolations { get; init; }
+ public int PolicyVersionChanged { get; init; }
+ public IReadOnlyList<PolicyChange> Changes { get; init; } = [];
+}
+
+public sealed record PolicyChange(
+ string RuleId,
+ string ChangeType,
+ string? Description);
+
+///
+/// Unknowns differences.
+///
+public sealed record UnknownsDelta
+{
+ public int NewUnknowns { get; init; }
+ public int ResolvedUnknowns { get; init; }
+ public int TotalBaselineUnknowns { get; init; }
+ public int TotalTargetUnknowns { get; init; }
+ public IReadOnlyDictionary<string, int> ByReasonCode { get; init; }
+ = new Dictionary<string, int>();
+}
+
+///
+/// A finding that drives the delta verdict.
+///
+public sealed record DeltaDriver
+{
+ public required string Type { get; init; } // "new-cve", "reachability-change", etc.
+ public required DeltaDriverSeverity Severity { get; init; }
+ public required string Description { get; init; }
+ public string? CveId { get; init; }
+ public string? Purl { get; init; }
+ public IReadOnlyDictionary<string, string> Details { get; init; }
+ = new Dictionary<string, string>();
+}
+
+public enum DeltaDriverSeverity
+{
+ Low,
+ Medium,
+ High,
+ Critical
+}
+
+///
+/// Summary statistics for the delta.
+///
+public sealed record DeltaSummary
+{
+ public int TotalChanges { get; init; }
+ public int RiskIncreasing { get; init; }
+ public int RiskDecreasing { get; init; }
+ public int Neutral { get; init; }
+ public decimal RiskScore { get; init; }
+ public string RiskDirection { get; init; } = "stable"; // "increasing", "decreasing", "stable"
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `SecurityStateDelta.cs` created with all models
+- [ ] SBOM, Reachability, VEX, Policy, Unknowns deltas defined
+- [ ] DeltaDriver for verdict justification
+- [ ] Summary statistics with risk direction
+- [ ] Content-addressed delta ID
+
+---
+
+### T2: Define DeltaVerdict Model
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Create the verdict model for security state deltas.
+
+**Implementation Path**: `Deltas/DeltaVerdict.cs` (new file)
+
+**Model Definition**:
+```csharp
+namespace StellaOps.Policy.Deltas;
+
+///
+/// Verdict for a security state delta.
+/// Determines whether a change should be allowed to proceed.
+///
+public sealed record DeltaVerdict
+{
+ ///
+ /// Unique identifier for this verdict.
+ ///
+ public required string VerdictId { get; init; }
+
+ ///
+ /// Reference to the delta being evaluated.
+ ///
+ public required string DeltaId { get; init; }
+
+ ///
+ /// When this verdict was rendered.
+ ///
+ public required DateTimeOffset EvaluatedAt { get; init; }
+
+ ///
+ /// The verdict outcome.
+ ///
+ public required DeltaVerdictStatus Status { get; init; }
+
+ ///
+ /// Recommended gate level based on delta risk.
+ ///
+ public GateLevel RecommendedGate { get; init; }
+
+ ///
+ /// Risk points consumed by this change.
+ ///
+ public int RiskPoints { get; init; }
+
+ /// <summary>
+ /// Drivers that contributed to the verdict.
+ /// </summary>
+ public IReadOnlyList<DeltaDriver> BlockingDrivers { get; init; } = [];
+
+ /// <summary>
+ /// Drivers that raised warnings but didn't block.
+ /// </summary>
+ public IReadOnlyList<DeltaDriver> WarningDrivers { get; init; } = [];
+
+ /// <summary>
+ /// Applied exceptions that allowed blocking drivers.
+ /// </summary>
+ public IReadOnlyList<string> AppliedExceptions { get; init; } = [];
+
+ ///
+ /// Human-readable explanation.
+ ///
+ public string? Explanation { get; init; }
+
+ ///
+ /// Recommendations for addressing issues.
+ ///
+ public IReadOnlyList<string> Recommendations { get; init; } = [];
+}
+
+///
+/// Possible verdict outcomes for a delta.
+///
+public enum DeltaVerdictStatus
+{
+ ///
+ /// Delta is safe to proceed.
+ ///
+ Pass,
+
+ ///
+ /// Delta has warnings but can proceed.
+ ///
+ Warn,
+
+ ///
+ /// Delta should not proceed without remediation.
+ ///
+ Fail,
+
+ ///
+ /// Delta is blocked but covered by exceptions.
+ ///
+ PassWithExceptions
+}
+
+///
+/// Gate levels aligned with diff-aware release gates.
+///
+public enum GateLevel
+{
+ ///
+ /// G0: No-risk (docs, comments only).
+ ///
+ G0,
+
+ ///
+ /// G1: Low risk (unit tests, 1 review).
+ ///
+ G1,
+
+ ///
+ /// G2: Moderate risk (integration tests, code owner, canary).
+ ///
+ G2,
+
+ ///
+ /// G3: High risk (security scan, migration plan, release captain).
+ ///
+ G3,
+
+ ///
+ /// G4: Very high risk (formal review, extended canary, comms plan).
+ ///
+ G4
+}
+
+///
+/// Builder for delta verdicts.
+///
+public sealed class DeltaVerdictBuilder
+{
+ private DeltaVerdictStatus _status = DeltaVerdictStatus.Pass;
+ private GateLevel _gate = GateLevel.G1;
+ private int _riskPoints;
+ private readonly List<DeltaDriver> _blockingDrivers = [];
+ private readonly List<DeltaDriver> _warningDrivers = [];
+ private readonly List<string> _exceptions = [];
+ private readonly List<string> _recommendations = [];
+ private string? _explanation;
+
+ public DeltaVerdictBuilder WithStatus(DeltaVerdictStatus status)
+ {
+ _status = status;
+ return this;
+ }
+
+ public DeltaVerdictBuilder WithGate(GateLevel gate)
+ {
+ _gate = gate;
+ return this;
+ }
+
+ public DeltaVerdictBuilder WithRiskPoints(int points)
+ {
+ _riskPoints = points;
+ return this;
+ }
+
+ public DeltaVerdictBuilder AddBlockingDriver(DeltaDriver driver)
+ {
+ _blockingDrivers.Add(driver);
+ _status = DeltaVerdictStatus.Fail;
+ return this;
+ }
+
+ public DeltaVerdictBuilder AddWarningDriver(DeltaDriver driver)
+ {
+ _warningDrivers.Add(driver);
+ if (_status == DeltaVerdictStatus.Pass)
+ _status = DeltaVerdictStatus.Warn;
+ return this;
+ }
+
+ public DeltaVerdictBuilder AddException(string exceptionId)
+ {
+ _exceptions.Add(exceptionId);
+ return this;
+ }
+
+ public DeltaVerdictBuilder AddRecommendation(string recommendation)
+ {
+ _recommendations.Add(recommendation);
+ return this;
+ }
+
+ public DeltaVerdictBuilder WithExplanation(string explanation)
+ {
+ _explanation = explanation;
+ return this;
+ }
+
+ public DeltaVerdict Build(string deltaId)
+ {
+ // If all blocking drivers are excepted, change to PassWithExceptions
+ if (_status == DeltaVerdictStatus.Fail && _exceptions.Count >= _blockingDrivers.Count)
+ {
+ _status = DeltaVerdictStatus.PassWithExceptions;
+ }
+
+ return new DeltaVerdict
+ {
+ VerdictId = $"dv:{Guid.NewGuid():N}",
+ DeltaId = deltaId,
+ EvaluatedAt = DateTimeOffset.UtcNow,
+ Status = _status,
+ RecommendedGate = _gate,
+ RiskPoints = _riskPoints,
+ BlockingDrivers = _blockingDrivers.ToList(),
+ WarningDrivers = _warningDrivers.ToList(),
+ AppliedExceptions = _exceptions.ToList(),
+ Explanation = _explanation ?? GenerateExplanation(),
+ Recommendations = _recommendations.ToList()
+ };
+ }
+
+ private string GenerateExplanation()
+ {
+ return _status switch
+ {
+ DeltaVerdictStatus.Pass => "No blocking changes detected",
+ DeltaVerdictStatus.Warn => $"{_warningDrivers.Count} warning(s) detected",
+ DeltaVerdictStatus.Fail => $"{_blockingDrivers.Count} blocking issue(s) detected",
+ DeltaVerdictStatus.PassWithExceptions => $"Blocked by {_blockingDrivers.Count} issue(s), covered by exceptions",
+ _ => "Unknown status"
+ };
+ }
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `DeltaVerdict.cs` created
+- [ ] Four verdict statuses: Pass, Warn, Fail, PassWithExceptions
+- [ ] Gate level recommendation (G0-G4)
+- [ ] Risk points calculation
+- [ ] Blocking and warning drivers separated
+- [ ] Builder with auto-explanation
+
+---
+
+### T3: Implement DeltaComputer
+
+**Assignee**: Policy Team
+**Story Points**: 4
+**Status**: TODO
+**Dependencies**: T1, T2
+
+**Description**:
+Implement computation of deltas across all dimensions.
+
+**Implementation Path**: `Deltas/DeltaComputer.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Deltas;
+
+///
+/// Computes security state deltas between baseline and target.
+///
+public sealed class DeltaComputer : IDeltaComputer
+{
+ private readonly ISnapshotService _snapshotService;
+ private readonly ISbomComparer _sbomComparer;
+ private readonly IReachabilityComparer _reachabilityComparer;
+ private readonly IVexComparer _vexComparer;
+ private readonly IPolicyComparer _policyComparer;
+ private readonly IHasher _hasher;
+ private readonly ILogger _logger;
+
+ public async Task<SecurityStateDelta> ComputeDeltaAsync(
+ string baselineSnapshotId,
+ string targetSnapshotId,
+ ArtifactRef artifact,
+ CancellationToken ct = default)
+ {
+ _logger.LogInformation(
+ "Computing delta between {Baseline} and {Target}",
+ baselineSnapshotId, targetSnapshotId);
+
+ // Load snapshots
+ var baseline = await _snapshotService.GetSnapshotAsync(baselineSnapshotId, ct)
+ ?? throw new InvalidOperationException($"Baseline snapshot {baselineSnapshotId} not found");
+ var target = await _snapshotService.GetSnapshotAsync(targetSnapshotId, ct)
+ ?? throw new InvalidOperationException($"Target snapshot {targetSnapshotId} not found");
+
+ // Compute component deltas
+ var sbomDelta = await _sbomComparer.CompareAsync(baseline, target, ct);
+ var reachabilityDelta = await _reachabilityComparer.CompareAsync(baseline, target, ct);
+ var vexDelta = await _vexComparer.CompareAsync(baseline, target, ct);
+ var policyDelta = await _policyComparer.CompareAsync(baseline, target, ct);
+ var unknownsDelta = ComputeUnknownsDelta(baseline, target);
+
+ // Identify drivers
+ var drivers = IdentifyDrivers(sbomDelta, reachabilityDelta, vexDelta, policyDelta, unknownsDelta);
+
+ // Compute summary
+ var summary = ComputeSummary(sbomDelta, reachabilityDelta, vexDelta, policyDelta, drivers);
+
+ var delta = new SecurityStateDelta
+ {
+ DeltaId = "", // Computed below
+ ComputedAt = DateTimeOffset.UtcNow,
+ BaselineSnapshotId = baselineSnapshotId,
+ TargetSnapshotId = targetSnapshotId,
+ Artifact = artifact,
+ Sbom = sbomDelta,
+ Reachability = reachabilityDelta,
+ Vex = vexDelta,
+ Policy = policyDelta,
+ Unknowns = unknownsDelta,
+ Drivers = drivers,
+ Summary = summary
+ };
+
+ // Compute content-addressed ID
+ var deltaId = ComputeDeltaId(delta);
+
+ return delta with { DeltaId = deltaId };
+ }
+
+ private IReadOnlyList<DeltaDriver> IdentifyDrivers(
+ SbomDelta sbom,
+ ReachabilityDelta reach,
+ VexDelta vex,
+ PolicyDelta policy,
+ UnknownsDelta unknowns)
+ {
+ var drivers = new List<DeltaDriver>();
+
+ // New reachable CVEs are critical drivers
+ foreach (var change in reach.Changes.Where(c => !c.WasReachable && c.IsReachable))
+ {
+ drivers.Add(new DeltaDriver
+ {
+ Type = "new-reachable-cve",
+ Severity = DeltaDriverSeverity.Critical,
+ Description = $"CVE {change.CveId} is now reachable",
+ CveId = change.CveId,
+ Purl = change.Purl
+ });
+ }
+
+ // Lost VEX coverage
+ foreach (var change in vex.Changes.Where(c => c.OldStatus == "not_affected" && c.NewStatus is null))
+ {
+ drivers.Add(new DeltaDriver
+ {
+ Type = "lost-vex-coverage",
+ Severity = DeltaDriverSeverity.High,
+ Description = $"VEX coverage lost for {change.CveId}",
+ CveId = change.CveId
+ });
+ }
+
+ // New policy violations
+ foreach (var change in policy.Changes.Where(c => c.ChangeType == "new-violation"))
+ {
+ drivers.Add(new DeltaDriver
+ {
+ Type = "new-policy-violation",
+ Severity = DeltaDriverSeverity.High,
+ Description = change.Description ?? $"New violation of rule {change.RuleId}"
+ });
+ }
+
+ // New high-risk packages
+ foreach (var pkg in sbom.AddedPackages.Where(IsHighRiskPackage))
+ {
+ drivers.Add(new DeltaDriver
+ {
+ Type = "high-risk-package-added",
+ Severity = DeltaDriverSeverity.Medium,
+ Description = $"New high-risk package: {pkg.Purl}",
+ Purl = pkg.Purl
+ });
+ }
+
+ // Increased unknowns
+ if (unknowns.NewUnknowns > 0)
+ {
+ drivers.Add(new DeltaDriver
+ {
+ Type = "new-unknowns",
+ Severity = DeltaDriverSeverity.Medium,
+ Description = $"{unknowns.NewUnknowns} new unknown(s) introduced",
+ Details = unknowns.ByReasonCode.ToDictionary(kv => kv.Key, kv => kv.Value.ToString())
+ });
+ }
+
+ return drivers.OrderByDescending(d => d.Severity).ToList();
+ }
+
+ private DeltaSummary ComputeSummary(
+ SbomDelta sbom,
+ ReachabilityDelta reach,
+ VexDelta vex,
+ PolicyDelta policy,
+ IReadOnlyList<DeltaDriver> drivers)
+ {
+ var totalChanges = sbom.PackagesAdded + sbom.PackagesRemoved +
+ reach.NewReachable + reach.NewUnreachable +
+ vex.NewVexStatements + vex.RevokedVexStatements +
+ policy.NewViolations + policy.ResolvedViolations;
+
+ var riskIncreasing = drivers.Count(d =>
+ d.Severity is DeltaDriverSeverity.Critical or DeltaDriverSeverity.High);
+ var riskDecreasing = reach.NewUnreachable + vex.NewVexStatements + policy.ResolvedViolations;
+ var neutral = totalChanges - riskIncreasing - riskDecreasing;
+
+ var riskScore = ComputeRiskScore(drivers);
+ var riskDirection = riskIncreasing > riskDecreasing ? "increasing" :
+ riskIncreasing < riskDecreasing ? "decreasing" : "stable";
+
+ return new DeltaSummary
+ {
+ TotalChanges = totalChanges,
+ RiskIncreasing = riskIncreasing,
+ RiskDecreasing = riskDecreasing,
+ Neutral = neutral,
+ RiskScore = riskScore,
+ RiskDirection = riskDirection
+ };
+ }
+
+ private decimal ComputeRiskScore(IReadOnlyList<DeltaDriver> drivers)
+ {
+ return drivers.Sum(d => d.Severity switch
+ {
+ DeltaDriverSeverity.Critical => 20m,
+ DeltaDriverSeverity.High => 10m,
+ DeltaDriverSeverity.Medium => 5m,
+ DeltaDriverSeverity.Low => 1m,
+ _ => 0m
+ });
+ }
+
+ private static bool IsHighRiskPackage(PackageChange pkg)
+ {
+ // Simplified: Check for known high-risk characteristics
+ return pkg.Purl.Contains("native") || pkg.Purl.Contains("crypto");
+ }
+
+ private string ComputeDeltaId(SecurityStateDelta delta)
+ {
+ var json = JsonSerializer.Serialize(delta with { DeltaId = "" },
+ new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
+ var hash = _hasher.ComputeSha256(json);
+ return $"delta:sha256:{hash}";
+ }
+}
+
+public interface IDeltaComputer
+{
+ Task<SecurityStateDelta> ComputeDeltaAsync(
+ string baselineSnapshotId,
+ string targetSnapshotId,
+ ArtifactRef artifact,
+ CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `DeltaComputer.cs` created
+- [ ] Snapshot loading and comparison
+- [ ] SBOM, reachability, VEX, policy comparers used
+- [ ] Drivers identified by severity
+- [ ] Summary statistics computed
+- [ ] Risk score and direction calculated
+- [ ] Content-addressed delta ID
+
+---
+
+### T4: Implement BaselineSelector
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T1
+
+**Description**:
+Implement selection of appropriate baseline for delta comparison.
+
+**Implementation Path**: `Deltas/BaselineSelector.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Deltas;
+
+/// <summary>
+/// Selects the appropriate baseline for delta comparison.
+/// </summary>
+public sealed class BaselineSelector : IBaselineSelector
+{
+ private readonly ISnapshotStore _snapshotStore;
+ private readonly IVerdictStore _verdictStore;
+    private readonly ILogger<BaselineSelector> _logger;
+
+    /// <summary>
+    /// Selects a baseline snapshot for the given artifact.
+    /// </summary>
+    public async Task<BaselineSelectionResult> SelectBaselineAsync(
+ string artifactDigest,
+ BaselineSelectionStrategy strategy,
+ CancellationToken ct = default)
+ {
+ _logger.LogDebug("Selecting baseline for {Artifact} using strategy {Strategy}",
+ artifactDigest, strategy);
+
+ return strategy switch
+ {
+ BaselineSelectionStrategy.PreviousBuild => await SelectPreviousBuildAsync(artifactDigest, ct),
+ BaselineSelectionStrategy.LastApproved => await SelectLastApprovedAsync(artifactDigest, ct),
+ BaselineSelectionStrategy.ProductionDeployed => await SelectProductionAsync(artifactDigest, ct),
+ BaselineSelectionStrategy.BranchBase => await SelectBranchBaseAsync(artifactDigest, ct),
+ BaselineSelectionStrategy.Explicit => throw new ArgumentException("Explicit strategy requires baseline ID"),
+ _ => throw new ArgumentOutOfRangeException(nameof(strategy))
+ };
+ }
+
+    /// <summary>
+    /// Selects a baseline with an explicit snapshot ID.
+    /// </summary>
+    public async Task<BaselineSelectionResult> SelectExplicitAsync(
+ string baselineSnapshotId,
+ CancellationToken ct = default)
+ {
+ var snapshot = await _snapshotStore.GetAsync(baselineSnapshotId, ct);
+ if (snapshot is null)
+ {
+ return BaselineSelectionResult.NotFound($"Snapshot {baselineSnapshotId} not found");
+ }
+
+ return BaselineSelectionResult.Success(snapshot, BaselineSelectionStrategy.Explicit);
+ }
+
+    private async Task<BaselineSelectionResult> SelectPreviousBuildAsync(
+ string artifactDigest, CancellationToken ct)
+ {
+ // Find the most recent verdict for this artifact's repository
+ var repository = ExtractRepository(artifactDigest);
+ var verdicts = await _verdictStore.ListByRepositoryAsync(repository, limit: 10, ct);
+
+ var previousVerdict = verdicts
+ .Where(v => v.ArtifactDigest != artifactDigest)
+ .OrderByDescending(v => v.EvaluatedAt)
+ .FirstOrDefault();
+
+ if (previousVerdict?.KnowledgeSnapshotId is null)
+ {
+ return BaselineSelectionResult.NotFound("No previous build found");
+ }
+
+ var snapshot = await _snapshotStore.GetAsync(previousVerdict.KnowledgeSnapshotId, ct);
+ return snapshot is not null
+ ? BaselineSelectionResult.Success(snapshot, BaselineSelectionStrategy.PreviousBuild)
+ : BaselineSelectionResult.NotFound("Previous build snapshot not found");
+ }
+
+    private async Task<BaselineSelectionResult> SelectLastApprovedAsync(
+ string artifactDigest, CancellationToken ct)
+ {
+ var repository = ExtractRepository(artifactDigest);
+
+ // Find the most recent passing verdict
+ var verdicts = await _verdictStore.ListByRepositoryAsync(repository, limit: 50, ct);
+
+ var approvedVerdict = verdicts
+ .Where(v => v.Status is RiskVerdictStatus.Pass or RiskVerdictStatus.PassWithExceptions)
+ .OrderByDescending(v => v.EvaluatedAt)
+ .FirstOrDefault();
+
+ if (approvedVerdict?.KnowledgeSnapshotId is null)
+ {
+ return BaselineSelectionResult.NotFound("No approved baseline found");
+ }
+
+ var snapshot = await _snapshotStore.GetAsync(approvedVerdict.KnowledgeSnapshotId, ct);
+ return snapshot is not null
+ ? BaselineSelectionResult.Success(snapshot, BaselineSelectionStrategy.LastApproved)
+ : BaselineSelectionResult.NotFound("Approved baseline snapshot not found");
+ }
+
+    private async Task<BaselineSelectionResult> SelectProductionAsync(
+ string artifactDigest, CancellationToken ct)
+ {
+ var repository = ExtractRepository(artifactDigest);
+
+ // Find verdict tagged as production deployment
+ var prodVerdict = await _verdictStore.GetByTagAsync(repository, "production", ct);
+
+ if (prodVerdict?.KnowledgeSnapshotId is null)
+ {
+ return BaselineSelectionResult.NotFound("No production baseline found");
+ }
+
+ var snapshot = await _snapshotStore.GetAsync(prodVerdict.KnowledgeSnapshotId, ct);
+ return snapshot is not null
+ ? BaselineSelectionResult.Success(snapshot, BaselineSelectionStrategy.ProductionDeployed)
+ : BaselineSelectionResult.NotFound("Production baseline snapshot not found");
+ }
+
+    private async Task<BaselineSelectionResult> SelectBranchBaseAsync(
+ string artifactDigest, CancellationToken ct)
+ {
+ // This would integrate with git to find the branch base
+ // For now, fall back to last approved
+ return await SelectLastApprovedAsync(artifactDigest, ct);
+ }
+
+ private static string ExtractRepository(string artifactDigest)
+ {
+ // Extract repository from artifact metadata
+ // This is a simplified implementation
+ return artifactDigest.Split('@')[0];
+ }
+}
+
+/// <summary>
+/// Strategies for selecting a baseline.
+/// </summary>
+public enum BaselineSelectionStrategy
+{
+    /// <summary>
+    /// Use the immediately previous build of the same artifact.
+    /// </summary>
+    PreviousBuild,
+
+    /// <summary>
+    /// Use the most recent build that passed policy.
+    /// </summary>
+    LastApproved,
+
+    /// <summary>
+    /// Use the build currently deployed to production.
+    /// </summary>
+    ProductionDeployed,
+
+    /// <summary>
+    /// Use the commit where the current branch diverged.
+    /// </summary>
+    BranchBase,
+
+    /// <summary>
+    /// Use an explicitly specified baseline.
+    /// </summary>
+    Explicit
+}
+
+public sealed record BaselineSelectionResult
+{
+ public required bool IsFound { get; init; }
+ public KnowledgeSnapshotManifest? Snapshot { get; init; }
+ public BaselineSelectionStrategy? Strategy { get; init; }
+ public string? Error { get; init; }
+
+ public static BaselineSelectionResult Success(KnowledgeSnapshotManifest snapshot, BaselineSelectionStrategy strategy) =>
+ new() { IsFound = true, Snapshot = snapshot, Strategy = strategy };
+
+ public static BaselineSelectionResult NotFound(string error) =>
+ new() { IsFound = false, Error = error };
+}
+
+public interface IBaselineSelector
+{
+    Task<BaselineSelectionResult> SelectBaselineAsync(
+        string artifactDigest,
+        BaselineSelectionStrategy strategy,
+        CancellationToken ct = default);
+
+    Task<BaselineSelectionResult> SelectExplicitAsync(
+        string baselineSnapshotId,
+        CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `BaselineSelector.cs` created
+- [ ] Multiple selection strategies: PreviousBuild, LastApproved, ProductionDeployed, BranchBase, Explicit
+- [ ] Fallback when baseline not found
+- [ ] Integration with verdict store
+- [ ] Logging for strategy selection
+
+---
+
+### T5: Create DeltaVerdictStatement
+
+**Assignee**: Policy Team
+**Story Points**: 3
+**Status**: TODO
+**Dependencies**: T2
+
+**Description**:
+Create signed attestation for delta verdicts.
+
+**Implementation Path**: `Deltas/DeltaVerdictStatement.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.Deltas;
+
+/// <summary>
+/// Creates in-toto statements for delta verdicts.
+/// </summary>
+public static class DeltaVerdictStatement
+{
+ public const string PredicateType = "https://stella.ops/predicates/delta-verdict@v1";
+
+    /// <summary>
+    /// Creates an in-toto statement from a delta verdict.
+    /// </summary>
+ public static InTotoStatement CreateStatement(
+ SecurityStateDelta delta,
+ DeltaVerdict verdict)
+ {
+ return new InTotoStatement
+ {
+ Type = "https://in-toto.io/Statement/v1",
+ Subject = new[]
+ {
+ new InTotoSubject
+ {
+ Name = delta.Artifact.Name ?? delta.Artifact.Digest,
+                    Digest = new Dictionary<string, string>
+ {
+ ["sha256"] = delta.Artifact.Digest.Replace("sha256:", "")
+ }
+ }
+ },
+ PredicateType = PredicateType,
+ Predicate = new DeltaVerdictPredicate
+ {
+ DeltaId = delta.DeltaId,
+ VerdictId = verdict.VerdictId,
+ Status = verdict.Status.ToString(),
+ BaselineSnapshotId = delta.BaselineSnapshotId,
+ TargetSnapshotId = delta.TargetSnapshotId,
+ RecommendedGate = verdict.RecommendedGate.ToString(),
+ RiskPoints = verdict.RiskPoints,
+ Summary = new DeltaSummaryPredicate
+ {
+ TotalChanges = delta.Summary.TotalChanges,
+ RiskIncreasing = delta.Summary.RiskIncreasing,
+ RiskDecreasing = delta.Summary.RiskDecreasing,
+ RiskDirection = delta.Summary.RiskDirection
+ },
+ BlockingDrivers = verdict.BlockingDrivers
+ .Select(d => new DriverPredicate { Type = d.Type, Description = d.Description })
+ .ToList(),
+ AppliedExceptions = verdict.AppliedExceptions.ToList(),
+ EvaluatedAt = verdict.EvaluatedAt.ToString("o")
+ }
+ };
+ }
+}
+
+public sealed record DeltaVerdictPredicate
+{
+ [JsonPropertyName("deltaId")]
+ public required string DeltaId { get; init; }
+
+ [JsonPropertyName("verdictId")]
+ public required string VerdictId { get; init; }
+
+ [JsonPropertyName("status")]
+ public required string Status { get; init; }
+
+ [JsonPropertyName("baselineSnapshotId")]
+ public required string BaselineSnapshotId { get; init; }
+
+ [JsonPropertyName("targetSnapshotId")]
+ public required string TargetSnapshotId { get; init; }
+
+ [JsonPropertyName("recommendedGate")]
+ public required string RecommendedGate { get; init; }
+
+ [JsonPropertyName("riskPoints")]
+ public int RiskPoints { get; init; }
+
+ [JsonPropertyName("summary")]
+ public required DeltaSummaryPredicate Summary { get; init; }
+
+ [JsonPropertyName("blockingDrivers")]
+    public required IReadOnlyList<DriverPredicate> BlockingDrivers { get; init; }
+
+ [JsonPropertyName("appliedExceptions")]
+    public required IReadOnlyList<string> AppliedExceptions { get; init; }
+
+ [JsonPropertyName("evaluatedAt")]
+ public required string EvaluatedAt { get; init; }
+}
+
+public sealed record DeltaSummaryPredicate
+{
+ [JsonPropertyName("totalChanges")]
+ public int TotalChanges { get; init; }
+
+ [JsonPropertyName("riskIncreasing")]
+ public int RiskIncreasing { get; init; }
+
+ [JsonPropertyName("riskDecreasing")]
+ public int RiskDecreasing { get; init; }
+
+ [JsonPropertyName("riskDirection")]
+ public required string RiskDirection { get; init; }
+}
+
+public sealed record DriverPredicate
+{
+ [JsonPropertyName("type")]
+ public required string Type { get; init; }
+
+ [JsonPropertyName("description")]
+ public required string Description { get; init; }
+}
+
+/// <summary>
+/// Service for creating and signing delta verdict attestations.
+/// </summary>
+public sealed class DeltaVerdictAttestor : IDeltaVerdictAttestor
+{
+ private readonly ISigner _signer;
+    private readonly ILogger<DeltaVerdictAttestor> _logger;
+
+    public async Task<DsseEnvelope> AttestAsync(
+ SecurityStateDelta delta,
+ DeltaVerdict verdict,
+ CancellationToken ct = default)
+ {
+ var statement = DeltaVerdictStatement.CreateStatement(delta, verdict);
+
+ var payload = JsonSerializer.SerializeToUtf8Bytes(statement, new JsonSerializerOptions
+ {
+ PropertyNamingPolicy = JsonNamingPolicy.CamelCase
+ });
+
+ var signature = await _signer.SignAsync(payload, ct);
+
+ _logger.LogInformation(
+ "Created delta verdict attestation for {DeltaId} with status {Status}",
+ delta.DeltaId, verdict.Status);
+
+ return new DsseEnvelope
+ {
+ PayloadType = "application/vnd.in-toto+json",
+ Payload = Convert.ToBase64String(payload),
+ Signatures = new[]
+ {
+ new DsseSignature
+ {
+ KeyId = _signer.KeyId,
+ Sig = Convert.ToBase64String(signature)
+ }
+ }
+ };
+ }
+}
+
+public interface IDeltaVerdictAttestor
+{
+    Task<DsseEnvelope> AttestAsync(
+        SecurityStateDelta delta,
+        DeltaVerdict verdict,
+        CancellationToken ct = default);
+}
+```
+
+**Acceptance Criteria**:
+- [ ] `DeltaVerdictStatement.cs` created
+- [ ] Predicate type: `https://stella.ops/predicates/delta-verdict@v1`
+- [ ] In-toto statement structure correct
+- [ ] Delta summary included in predicate
+- [ ] Blocking drivers listed
+- [ ] Attestor service for signing
+
+---
+
+### T6: Add Delta API Endpoints
+
+**Assignee**: Policy Team
+**Story Points**: 2
+**Status**: TODO
+**Dependencies**: T3, T4, T5
+
+**Description**:
+Add REST API endpoints for delta operations.
+
+**Implementation Path**: `src/Policy/StellaOps.Policy.WebService/Controllers/DeltasController.cs` (new file)
+
+**Implementation**:
+```csharp
+namespace StellaOps.Policy.WebService.Controllers;
+
+[ApiController]
+[Route("api/v1/policy/deltas")]
+public class DeltasController : ControllerBase
+{
+ private readonly IDeltaComputer _deltaComputer;
+ private readonly IBaselineSelector _baselineSelector;
+ private readonly IDeltaVerdictAttestor _attestor;
+ private readonly ILogger