diff --git a/.claude/settings.local.json b/.claude/settings.local.json
index b7e943c1d..200b4d1f6 100644
--- a/.claude/settings.local.json
+++ b/.claude/settings.local.json
@@ -15,7 +15,10 @@
"Bash(Select-Object -ExpandProperty FullName)",
"Bash(echo:*)",
"Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
- "Bash(wc:*)"
+ "Bash(wc:*)",
+ "Bash(find:*)",
+ "WebFetch(domain:docs.gradle.org)",
+ "WebSearch"
],
"deny": [],
"ask": []
diff --git a/.gitea/workflows/mock-dev-release.yml b/.gitea/workflows/mock-dev-release.yml
index 03d7b14e3..da4a21065 100644
--- a/.gitea/workflows/mock-dev-release.yml
+++ b/.gitea/workflows/mock-dev-release.yml
@@ -23,8 +23,18 @@ jobs:
cp deploy/downloads/manifest.json out/mock-release/
tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
+ - name: Compose config (dev + mock overlay)
+ run: |
+ set -euo pipefail
+ cd deploy/compose
+ docker compose --env-file env/dev.env.example --env-file env/mock.env.example \
+ -f docker-compose.dev.yaml -f docker-compose.mock.yaml config > /tmp/compose-mock-config.yaml
+ ls -lh /tmp/compose-mock-config.yaml
+
- name: Upload mock release bundle
uses: actions/upload-artifact@v3
with:
name: mock-dev-release
- path: out/mock-release/mock-dev-release.tgz
+ path: |
+ out/mock-release/mock-dev-release.tgz
+ /tmp/compose-mock-config.yaml
diff --git a/docs/api/taskrunner-openapi.yaml b/docs/api/taskrunner-openapi.yaml
new file mode 100644
index 000000000..5f8432a36
--- /dev/null
+++ b/docs/api/taskrunner-openapi.yaml
@@ -0,0 +1,886 @@
+# OpenAPI 3.1 specification for StellaOps TaskRunner WebService
+openapi: 3.1.0
+info:
+ title: StellaOps TaskRunner API
+ version: 0.1.0-draft
+ description: |
+ Contract for TaskRunner service covering pack runs, simulations, logs, artifacts, and approvals.
+ Uses the platform error envelope and tenant header `X-StellaOps-Tenant`.
+
+ ## Streaming Endpoints
+ The `/runs/{runId}/logs` endpoint returns logs in NDJSON (Newline Delimited JSON) format
+ for efficient streaming. Each line is a complete JSON object.
+
+ ## Control Flow Steps
+ TaskPacks support the following step kinds:
+ - **run**: Execute an action using a builtin or custom executor
+ - **parallel**: Execute child steps concurrently with optional maxParallel limit
+ - **map**: Iterate over items and execute a template step for each
+ - **loop**: Iterate with items expression, range, or static list
+ - **conditional**: Branch based on condition expressions
+ - **gate.approval**: Require manual approval before proceeding
+ - **gate.policy**: Evaluate policy and optionally require override approval
+servers:
+ - url: https://taskrunner.stellaops.example.com
+ description: Production
+ - url: https://taskrunner.dev.stellaops.example.com
+ description: Development
+security:
+ - oauth2: [taskrunner.viewer]
+ - oauth2: [taskrunner.operator]
+ - oauth2: [taskrunner.admin]
+
+paths:
+ /v1/task-runner/simulations:
+ post:
+ summary: Simulate a task pack
+ description: |
+ Validates a task pack manifest, creates an execution plan, and simulates the run
+ without actually executing any steps. Returns the simulation result showing which
+ steps would execute, which would be skipped, and which would require approvals.
+ operationId: simulateTaskPack
+ tags: [Simulations]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SimulationRequest'
+ examples:
+ basic-simulation:
+ summary: Basic simulation request
+ value:
+ manifest: |
+ apiVersion: stellaops.io/pack.v1
+ kind: TaskPack
+ metadata:
+ name: scan-deploy
+ version: 1.0.0
+ spec:
+ inputs:
+ - name: target
+ type: string
+ required: true
+ sandbox:
+ mode: sealed
+ egressAllowlist: []
+ cpuLimitMillicores: 100
+ memoryLimitMiB: 128
+ quotaSeconds: 60
+ slo:
+ runP95Seconds: 300
+ approvalP95Seconds: 900
+ maxQueueDepth: 100
+ steps:
+ - id: scan
+ run:
+ uses: builtin:scanner
+ with:
+ target: "{{ inputs.target }}"
+ inputs:
+ target: "registry.example.com/app:v1.2.3"
+ responses:
+ '200':
+ description: Simulation completed
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SimulationResponse'
+ examples:
+ simulation-result:
+ value:
+ planHash: "sha256:a1b2c3d4e5f6..."
+ failurePolicy:
+ maxAttempts: 1
+ backoffSeconds: 0
+ continueOnError: false
+ steps:
+ - id: scan
+ templateId: scan
+ kind: Run
+ enabled: true
+ status: Pending
+ uses: "builtin:scanner"
+ children: []
+ outputs: []
+ hasPendingApprovals: false
+ '400':
+ description: Invalid manifest or inputs
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PlanErrorResponse'
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs:
+ post:
+ summary: Create a pack run
+ description: |
+ Creates a new pack run from a task pack manifest. The run is scheduled for execution
+ and will proceed through its steps. If approval gates are present, the run will pause
+ at those gates until approvals are granted.
+ operationId: createPackRun
+ tags: [Runs]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateRunRequest'
+ examples:
+ create-run:
+ summary: Create a new run
+ value:
+ runId: "run-20251206-001"
+ manifest: |
+ apiVersion: stellaops.io/pack.v1
+ kind: TaskPack
+ metadata:
+ name: deploy-app
+ version: 2.0.0
+ spec:
+ sandbox:
+ mode: sealed
+ egressAllowlist: []
+ cpuLimitMillicores: 200
+ memoryLimitMiB: 256
+ quotaSeconds: 120
+ slo:
+ runP95Seconds: 600
+ approvalP95Seconds: 1800
+ maxQueueDepth: 50
+ approvals:
+ - id: security-review
+ grants: [packs.approve]
+ steps:
+ - id: build
+ run:
+ uses: builtin:build
+ - id: approval
+ gate:
+ approval:
+ id: security-review
+ message: "Security review required before deploy"
+ - id: deploy
+ run:
+ uses: builtin:deploy
+ tenantId: "tenant-prod"
+ responses:
+ '201':
+ description: Run created
+ headers:
+ Location:
+ description: URL of the created run
+ schema:
+ type: string
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/RunStateResponse'
+ '400':
+ description: Invalid manifest or inputs
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PlanErrorResponse'
+ '409':
+ description: Run ID already exists
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnvelope'
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs/{runId}:
+ get:
+ summary: Get run state
+ description: |
+ Returns the current state of a pack run, including status of all steps,
+ failure policy, and timing information.
+ operationId: getRunState
+ tags: [Runs]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ - $ref: '#/components/parameters/RunId'
+ responses:
+ '200':
+ description: Run state
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/RunStateResponse'
+ examples:
+ running:
+ summary: Run in progress
+ value:
+ runId: "run-20251206-001"
+ planHash: "sha256:a1b2c3d4..."
+ failurePolicy:
+ maxAttempts: 2
+ backoffSeconds: 30
+ continueOnError: false
+ createdAt: "2025-12-06T10:00:00Z"
+ updatedAt: "2025-12-06T10:05:00Z"
+ steps:
+ - stepId: build
+ kind: Run
+ enabled: true
+ continueOnError: false
+ status: Succeeded
+ attempts: 1
+ lastTransitionAt: "2025-12-06T10:02:00Z"
+ - stepId: approval
+ kind: GateApproval
+ enabled: true
+ continueOnError: false
+ approvalId: security-review
+ gateMessage: "Security review required before deploy"
+ status: Pending
+ attempts: 0
+ statusReason: "awaiting-approval"
+ - stepId: deploy
+ kind: Run
+ enabled: true
+ continueOnError: false
+ status: Pending
+ attempts: 0
+ '404':
+ description: Run not found
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs/{runId}/logs:
+ get:
+ summary: Stream run logs
+ description: |
+ Returns run logs as a stream of NDJSON (Newline Delimited JSON) entries.
+ Each line is a complete JSON object representing a log entry with timestamp,
+ level, event type, message, and optional metadata.
+
+ **Content-Type**: `application/x-ndjson`
+ operationId: streamRunLogs
+ tags: [Logs]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ - $ref: '#/components/parameters/RunId'
+ responses:
+ '200':
+ description: Log stream
+ content:
+ application/x-ndjson:
+ schema:
+ $ref: '#/components/schemas/RunLogEntry'
+ examples:
+ log-stream:
+ summary: Sample NDJSON log stream
+ value: |
+ {"timestamp":"2025-12-06T10:00:00Z","level":"info","eventType":"run.created","message":"Run created via API.","metadata":{"planHash":"sha256:a1b2c3d4...","requestedAt":"2025-12-06T10:00:00Z"}}
+ {"timestamp":"2025-12-06T10:00:01Z","level":"info","eventType":"step.started","message":"Starting step: build","stepId":"build"}
+ {"timestamp":"2025-12-06T10:02:00Z","level":"info","eventType":"step.completed","message":"Step completed: build","stepId":"build","metadata":{"duration":"119s"}}
+ {"timestamp":"2025-12-06T10:02:01Z","level":"warn","eventType":"gate.awaiting","message":"Awaiting approval: security-review","stepId":"approval"}
+ '404':
+ description: Run not found
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs/{runId}/artifacts:
+ get:
+ summary: List run artifacts
+ description: |
+ Returns a list of artifacts captured during the run, including file outputs,
+ evidence bundles, and expression-evaluated results.
+ operationId: listRunArtifacts
+ tags: [Artifacts]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ - $ref: '#/components/parameters/RunId'
+ responses:
+ '200':
+ description: Artifact list
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: '#/components/schemas/RunArtifact'
+ examples:
+ artifacts:
+ value:
+ - name: scan-report
+ type: file
+ sourcePath: "/output/scan-report.json"
+ storedPath: "runs/run-20251206-001/artifacts/scan-report.json"
+ status: captured
+ capturedAt: "2025-12-06T10:02:00Z"
+ - name: evidence-bundle
+ type: object
+ status: captured
+ capturedAt: "2025-12-06T10:02:00Z"
+ expressionJson: '{"sha256":"abc123...","attestations":[...]}'
+ '404':
+ description: Run not found
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs/{runId}/approvals/{approvalId}:
+ post:
+ summary: Apply approval decision
+ description: |
+ Applies an approval decision (approved, rejected, or expired) to a pending
+ approval gate. The planHash must match to prevent approving a stale plan.
+
+ If approved, the run will resume execution. If rejected, the run will fail
+ at the gate step.
+ operationId: applyApprovalDecision
+ tags: [Approvals]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ - $ref: '#/components/parameters/RunId'
+ - $ref: '#/components/parameters/ApprovalId'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ApprovalDecisionRequest'
+ examples:
+ approve:
+ summary: Approve the gate
+ value:
+ decision: approved
+ planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
+ actorId: "user:alice@example.com"
+ summary: "Reviewed and approved for production deployment"
+ reject:
+ summary: Reject the gate
+ value:
+ decision: rejected
+ planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
+ actorId: "user:bob@example.com"
+ summary: "Security scan found critical vulnerabilities"
+ responses:
+ '200':
+ description: Decision applied
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ApprovalDecisionResponse'
+ examples:
+ approved:
+ value:
+ status: approved
+ resumed: true
+ '400':
+ description: Invalid decision or planHash format
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnvelope'
+ '404':
+ description: Run or approval not found
+ '409':
+ description: Plan hash mismatch
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnvelope'
+ default:
+ $ref: '#/components/responses/Error'
+
+ /v1/task-runner/runs/{runId}/cancel:
+ post:
+ summary: Cancel a run
+ description: |
+ Requests cancellation of a run. Remaining pending steps will be marked as
+ skipped. Steps that have already succeeded or been skipped are not affected.
+ operationId: cancelRun
+ tags: [Runs]
+ parameters:
+ - $ref: '#/components/parameters/Tenant'
+ - $ref: '#/components/parameters/RunId'
+ responses:
+ '202':
+ description: Cancellation accepted
+ headers:
+ Location:
+ description: URL of the run
+ schema:
+ type: string
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ status:
+ type: string
+ enum: [cancelled]
+ '404':
+ description: Run not found
+ default:
+ $ref: '#/components/responses/Error'
+
+ /.well-known/openapi:
+ get:
+ summary: Get OpenAPI metadata
+ description: |
+ Returns metadata about the OpenAPI specification including the spec URL,
+ ETag for caching, and a signature for verification.
+ operationId: getOpenApiMetadata
+ tags: [Metadata]
+ responses:
+ '200':
+ description: OpenAPI metadata
+ headers:
+ ETag:
+ description: Spec version ETag
+ schema:
+ type: string
+ X-Signature:
+ description: Spec signature for verification
+ schema:
+ type: string
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/OpenApiMetadata'
+ examples:
+ metadata:
+ value:
+ specUrl: "/openapi"
+ version: "0.1.0-draft"
+ buildVersion: "20251206.1"
+ etag: '"abc123"'
+ signature: "sha256:def456..."
+
+components:
+ securitySchemes:
+ oauth2:
+ type: oauth2
+ flows:
+ clientCredentials:
+ tokenUrl: https://auth.stellaops.example.com/oauth/token
+ scopes:
+ taskrunner.viewer: Read-only access to runs and logs
+ taskrunner.operator: Create runs and apply approvals
+ taskrunner.admin: Full administrative access
+
+ parameters:
+ Tenant:
+ name: X-StellaOps-Tenant
+ in: header
+ required: false
+ description: Tenant slug (optional for single-tenant deployments)
+ schema:
+ type: string
+ RunId:
+ name: runId
+ in: path
+ required: true
+ description: Unique run identifier
+ schema:
+ type: string
+ pattern: '^[a-zA-Z0-9_-]+$'
+ ApprovalId:
+ name: approvalId
+ in: path
+ required: true
+ description: Approval gate identifier (from task pack approvals section)
+ schema:
+ type: string
+
+ responses:
+ Error:
+ description: Standard error envelope
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ErrorEnvelope'
+ examples:
+ internal-error:
+ value:
+ error:
+ code: internal_error
+ message: "An unexpected error occurred"
+ traceId: "f62f3c2b9c8e4c53"
+
+ schemas:
+ ErrorEnvelope:
+ type: object
+ required: [error]
+ properties:
+ error:
+ type: object
+ required: [code, message]
+ properties:
+ code:
+ type: string
+ description: Machine-readable error code
+ message:
+ type: string
+ description: Human-readable error message
+ traceId:
+ type: string
+ description: Trace ID for debugging
+
+ SimulationRequest:
+ type: object
+ required: [manifest]
+ properties:
+ manifest:
+ type: string
+ description: Task pack manifest in YAML format
+ inputs:
+ type: object
+ additionalProperties: true
+ description: Input values to provide to the task pack
+
+ SimulationResponse:
+ type: object
+ required: [planHash, failurePolicy, steps, outputs, hasPendingApprovals]
+ properties:
+ planHash:
+ type: string
+ description: SHA-256 hash of the execution plan
+ pattern: '^sha256:[a-f0-9]{64}$'
+ failurePolicy:
+ $ref: '#/components/schemas/FailurePolicy'
+ steps:
+ type: array
+ items:
+ $ref: '#/components/schemas/SimulationStep'
+ outputs:
+ type: array
+ items:
+ $ref: '#/components/schemas/SimulationOutput'
+ hasPendingApprovals:
+ type: boolean
+ description: Whether the plan contains approval gates
+
+ SimulationStep:
+ type: object
+ required: [id, templateId, kind, enabled, status, children]
+ properties:
+ id:
+ type: string
+ templateId:
+ type: string
+ kind:
+ type: string
+ enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
+ enabled:
+ type: boolean
+ status:
+ type: string
+ enum: [Pending, Skipped, RequiresApproval, RequiresPolicy, WillIterate, WillBranch]
+ statusReason:
+ type: string
+ uses:
+ type: string
+ description: Executor reference for run steps
+ approvalId:
+ type: string
+ gateMessage:
+ type: string
+ maxParallel:
+ type: integer
+ continueOnError:
+ type: boolean
+ children:
+ type: array
+ items:
+ $ref: '#/components/schemas/SimulationStep'
+ loopInfo:
+ $ref: '#/components/schemas/LoopInfo'
+ conditionalInfo:
+ $ref: '#/components/schemas/ConditionalInfo'
+ policyInfo:
+ $ref: '#/components/schemas/PolicyInfo'
+
+ LoopInfo:
+ type: object
+ description: Loop step simulation details
+ properties:
+ itemsExpression:
+ type: string
+ iterator:
+ type: string
+ index:
+ type: string
+ maxIterations:
+ type: integer
+ aggregationMode:
+ type: string
+ enum: [collect, merge, last, first, none]
+
+ ConditionalInfo:
+ type: object
+ description: Conditional step simulation details
+ properties:
+ branches:
+ type: array
+ items:
+ type: object
+ properties:
+ condition:
+ type: string
+ stepCount:
+ type: integer
+ elseStepCount:
+ type: integer
+ outputUnion:
+ type: boolean
+
+ PolicyInfo:
+ type: object
+ description: Policy gate simulation details
+ properties:
+ policyId:
+ type: string
+ policyVersion:
+ type: string
+ failureAction:
+ type: string
+ enum: [abort, warn, requestOverride, branch]
+ retryCount:
+ type: integer
+
+ SimulationOutput:
+ type: object
+ required: [name, type, requiresRuntimeValue]
+ properties:
+ name:
+ type: string
+ type:
+ type: string
+ requiresRuntimeValue:
+ type: boolean
+ pathExpression:
+ type: string
+ valueExpression:
+ type: string
+
+ CreateRunRequest:
+ type: object
+ required: [manifest]
+ properties:
+ runId:
+ type: string
+ description: Optional custom run ID (auto-generated if not provided)
+ manifest:
+ type: string
+ description: Task pack manifest in YAML format
+ inputs:
+ type: object
+ additionalProperties: true
+ description: Input values to provide to the task pack
+ tenantId:
+ type: string
+ description: Tenant identifier
+
+ RunStateResponse:
+ type: object
+ required: [runId, planHash, failurePolicy, createdAt, updatedAt, steps]
+ properties:
+ runId:
+ type: string
+ planHash:
+ type: string
+ pattern: '^sha256:[a-f0-9]{64}$'
+ failurePolicy:
+ $ref: '#/components/schemas/FailurePolicy'
+ createdAt:
+ type: string
+ format: date-time
+ updatedAt:
+ type: string
+ format: date-time
+ steps:
+ type: array
+ items:
+ $ref: '#/components/schemas/RunStateStep'
+
+ RunStateStep:
+ type: object
+ required: [stepId, kind, enabled, continueOnError, status, attempts]
+ properties:
+ stepId:
+ type: string
+ kind:
+ type: string
+ enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
+ enabled:
+ type: boolean
+ continueOnError:
+ type: boolean
+ maxParallel:
+ type: integer
+ approvalId:
+ type: string
+ gateMessage:
+ type: string
+ status:
+ type: string
+ enum: [Pending, Running, Succeeded, Failed, Skipped]
+ attempts:
+ type: integer
+ lastTransitionAt:
+ type: string
+ format: date-time
+ nextAttemptAt:
+ type: string
+ format: date-time
+ statusReason:
+ type: string
+
+ FailurePolicy:
+ type: object
+ required: [maxAttempts, backoffSeconds, continueOnError]
+ properties:
+ maxAttempts:
+ type: integer
+ minimum: 1
+ backoffSeconds:
+ type: integer
+ minimum: 0
+ continueOnError:
+ type: boolean
+
+ RunLogEntry:
+ type: object
+ required: [timestamp, level, eventType, message]
+ description: |
+ Log entry returned in NDJSON stream. Each entry is a single JSON object
+ followed by a newline character.
+ properties:
+ timestamp:
+ type: string
+ format: date-time
+ level:
+ type: string
+ enum: [debug, info, warn, error]
+ eventType:
+ type: string
+ description: |
+ Event type identifier, e.g.:
+ - run.created, run.started, run.completed, run.failed, run.cancelled
+ - step.started, step.completed, step.failed, step.skipped
+ - gate.awaiting, gate.approved, gate.rejected
+ - run.schedule-failed, run.cancel-requested
+ message:
+ type: string
+ stepId:
+ type: string
+ metadata:
+ type: object
+ additionalProperties:
+ type: string
+
+ RunArtifact:
+ type: object
+ required: [name, type, status]
+ properties:
+ name:
+ type: string
+ type:
+ type: string
+ enum: [file, object]
+ sourcePath:
+ type: string
+ storedPath:
+ type: string
+ status:
+ type: string
+ enum: [pending, captured, failed]
+ notes:
+ type: string
+ capturedAt:
+ type: string
+ format: date-time
+ expressionJson:
+ type: string
+ description: JSON string of evaluated expression result for object outputs
+
+ ApprovalDecisionRequest:
+ type: object
+ required: [decision, planHash]
+ properties:
+ decision:
+ type: string
+ enum: [approved, rejected, expired]
+ planHash:
+ type: string
+ pattern: '^sha256:[a-f0-9]{64}$'
+ description: Plan hash to verify against (must match current run plan)
+ actorId:
+ type: string
+ description: Identifier of the approver (e.g., user:alice@example.com)
+ summary:
+ type: string
+ description: Optional comment explaining the decision
+
+ ApprovalDecisionResponse:
+ type: object
+ required: [status, resumed]
+ properties:
+ status:
+ type: string
+ enum: [approved, rejected, expired]
+ resumed:
+ type: boolean
+ description: Whether the run was resumed (true for approved decisions)
+
+ PlanErrorResponse:
+ type: object
+ required: [errors]
+ properties:
+ errors:
+ type: array
+ items:
+ type: object
+ required: [path, message]
+ properties:
+ path:
+ type: string
+ description: JSON path to the error location
+ message:
+ type: string
+
+ OpenApiMetadata:
+ type: object
+ required: [specUrl, version, etag]
+ properties:
+ specUrl:
+ type: string
+ description: URL to fetch the full OpenAPI spec
+ version:
+ type: string
+ description: API version
+ buildVersion:
+ type: string
+ description: Build version identifier
+ etag:
+ type: string
+ description: ETag for caching
+ signature:
+ type: string
+ description: Signature for spec verification
+
+tags:
+ - name: Simulations
+ description: Task pack simulation without execution
+ - name: Runs
+ description: Pack run lifecycle management
+ - name: Logs
+ description: Run log streaming
+ - name: Artifacts
+ description: Run artifact management
+ - name: Approvals
+ description: Approval gate decisions
+ - name: Metadata
+ description: Service metadata and discovery
diff --git a/docs/contracts/authority-crypto-provider.md b/docs/contracts/authority-crypto-provider.md
new file mode 100644
index 000000000..a4ca7c156
--- /dev/null
+++ b/docs/contracts/authority-crypto-provider.md
@@ -0,0 +1,369 @@
+# Authority Crypto Provider Contract
+
+> **Status:** APPROVED
+> **Version:** 1.0.0
+> **Last Updated:** 2025-12-06
+> **Owner:** Authority Core Guild
+> **Unblocks:** AUTH-CRYPTO-90-001, SEC-CRYPTO-90-014, SCANNER-CRYPTO-90-001, ATTESTOR-CRYPTO-90-001
+
+## Overview
+
+This contract defines the Authority signing provider interface for StellaOps, enabling pluggable cryptographic backends including:
+- **Software keys** (default) — ECDSA P-256/P-384, RSA, EdDSA
+- **HSM integration** — PKCS#11, Cloud KMS (AWS, GCP, Azure)
+- **Regional compliance** — CryptoPro GOST (RU), SM2/SM3 (CN), eIDAS (EU), FIPS 140-2
+
+## Architecture
+
+```
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ Authority Crypto Provider │
+├─────────────────────────────────────────────────────────────────────────────┤
+│ │
+│ ┌─────────────────────────────────────────────────────────────────────────┐│
+│ │ ISigningProvider Interface ││
+│ │ ││
+│ │ + Sign(data: byte[], keyId: string) → SignatureResult ││
+│ │ + Verify(data: byte[], signature: byte[], keyId: string) → bool ││
+│ │ + GetPublicKey(keyId: string) → PublicKeyInfo ││
+│ │ + ListKeys(filter: KeyFilter) → KeyInfo[] ││
+│ │ + CreateKey(spec: KeySpec) → KeyInfo ││
+│ │ + RotateKey(keyId: string) → KeyInfo ││
+│ │ + ExportJWKS(keyIds: string[]) → JWKS ││
+│ └─────────────────────────────────────────────────────────────────────────┘│
+│ │ │
+│ ┌────────────────────┼────────────────────┐ │
+│ ▼ ▼ ▼ │
+│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
+│ │ Software │ │ PKCS#11 │ │ Cloud KMS │ │
+│ │ Provider │ │ Provider │ │ Provider │ │
+│ │ │ │ │ │ │ │
+│ │ • File keys │ │ • HSM │ │ • AWS KMS │ │
+│ │ • Memory │ │ • SmartCard │ │ • GCP KMS │ │
+│ │ • Vault │ │ • CryptoPro │ │ • Azure KV │ │
+│ └──────────────┘ └──────────────┘ └──────────────┘ │
+│ │
+└─────────────────────────────────────────────────────────────────────────────┘
+```
+
+## 1. ISigningProvider Interface
+
+### 1.1 Core Methods
+
+```csharp
+/// <summary>
+/// Pluggable cryptographic signing provider for Authority service.
+/// </summary>
+public interface ISigningProvider
+{
+ /// Provider identifier (e.g., "software", "pkcs11", "aws-kms")
+ string ProviderId { get; }
+
+ /// Supported algorithms by this provider
+ IReadOnlyList<string> SupportedAlgorithms { get; }
+
+ /// Sign data with the specified key
+ Task<SignatureResult> SignAsync(
+ byte[] data,
+ string keyId,
+ SigningOptions? options = null,
+ CancellationToken ct = default);
+
+ /// Verify a signature
+ Task<bool> VerifyAsync(
+ byte[] data,
+ byte[] signature,
+ string keyId,
+ CancellationToken ct = default);
+
+ /// Get public key information
+ Task<PublicKeyInfo> GetPublicKeyAsync(
+ string keyId,
+ CancellationToken ct = default);
+
+ /// List available keys
+ Task<IReadOnlyList<KeyInfo>> ListKeysAsync(
+ KeyFilter? filter = null,
+ CancellationToken ct = default);
+
+ /// Create a new key pair
+ Task<KeyInfo> CreateKeyAsync(
+ KeySpec spec,
+ CancellationToken ct = default);
+
+ /// Rotate a key (create new version)
+ Task<KeyInfo> RotateKeyAsync(
+ string keyId,
+ CancellationToken ct = default);
+
+ /// Export keys as JWKS for distributed verification
+ Task<Jwks> ExportJwksAsync(
+ IEnumerable<string>? keyIds = null,
+ CancellationToken ct = default);
+
+ /// Import a public key for verification
+ Task<KeyInfo> ImportPublicKeyAsync(
+ byte[] keyData,
+ string format,
+ KeyMetadata? metadata = null,
+ CancellationToken ct = default);
+}
+```
+
+### 1.2 Supporting Types
+
+```csharp
+public record SignatureResult(
+ byte[] Signature,
+ string Algorithm,
+ string KeyId,
+ string? KeyVersion,
+ DateTimeOffset Timestamp);
+
+public record SigningOptions(
+ string? Algorithm = null,
+ bool IncludeTimestamp = true,
+ string? Nonce = null);
+
+public record PublicKeyInfo(
+ string KeyId,
+ string Algorithm,
+ byte[] PublicKey,
+ string Format, // "PEM", "DER", "JWK"
+ string? Fingerprint,
+ DateTimeOffset? ExpiresAt);
+
+public record KeyInfo(
+ string KeyId,
+ string Algorithm,
+ KeyState State,
+ DateTimeOffset CreatedAt,
+ DateTimeOffset? ExpiresAt,
+ string? CurrentVersion,
+ IReadOnlyDictionary<string, string>? Metadata);
+
+public enum KeyState
+{
+ Active,
+ Disabled,
+ PendingDeletion,
+ Deleted
+}
+
+public record KeySpec(
+ string Algorithm,
+ int? KeySize = null,
+ string? Purpose = null, // "signing", "attestation", "authority"
+ IReadOnlyDictionary<string, string>? Metadata = null,
+ DateTimeOffset? ExpiresAt = null);
+
+public record KeyFilter(
+ string? Purpose = null,
+ KeyState? State = null,
+ string? Algorithm = null);
+```
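+
+A minimal usage sketch of the interface and types above; the wrapper class and the choice of key purpose are illustrative, not part of the contract:
+
+```csharp
+using System.Text;
+
+// Sketch: exercises the contract with any ISigningProvider implementation.
+public static class SigningProviderExample
+{
+    public static async Task<bool> SignAndVerifyAsync(ISigningProvider provider)
+    {
+        // Create an ES256 attestation key (KeySpec fields per section 1.2).
+        KeyInfo key = await provider.CreateKeyAsync(
+            new KeySpec(Algorithm: "ES256", Purpose: "attestation"));
+
+        // Sign a payload with the new key, then verify it round-trips.
+        byte[] payload = Encoding.UTF8.GetBytes("{\"subject\":\"sbom\"}");
+        SignatureResult result = await provider.SignAsync(payload, key.KeyId);
+        return await provider.VerifyAsync(payload, result.Signature, key.KeyId);
+    }
+}
+```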
+
+## 2. Supported Algorithms
+
+### 2.1 Algorithm Registry
+
+| Algorithm | OID | Key Size / Curve | Compliance | Provider Support |
+|-----------|-----|------------------|------------|------------------|
+| **ES256** | 1.2.840.10045.4.3.2 | P-256 | FIPS, eIDAS | All |
+| **ES384** | 1.2.840.10045.4.3.3 | P-384 | FIPS, eIDAS | All |
+| **RS256** | 1.2.840.113549.1.1.11 | 2048+ | FIPS, eIDAS | All |
+| **RS384** | 1.2.840.113549.1.1.12 | 2048+ | FIPS, eIDAS | All |
+| **EdDSA** | 1.3.101.112 | Ed25519 | — | Software, some HSM |
+| **PS256** | 1.2.840.113549.1.1.10 | 2048+ | FIPS | All |
+| **GOST R 34.10-2012** | 1.2.643.7.1.1.1.1 | 256/512 | RU | PKCS#11 (CryptoPro) |
+| **SM2** | 1.2.156.10197.1.301 | 256 | CN | PKCS#11 |
+
+### 2.2 Default Configuration
+
+```yaml
+# etc/authority.yaml
+crypto:
+ provider: software # or: pkcs11, aws-kms, gcp-kms, azure-keyvault
+
+ software:
+ keys_path: /var/lib/stellaops/keys
+ default_algorithm: ES256
+
+ pkcs11:
+ library_path: /usr/lib/libpkcs11.so
+ slot_id: 0
+ pin_env: AUTHORITY_HSM_PIN
+ # For CryptoPro:
+ # library_path: /opt/cprocsp/lib/amd64/libcapi20.so
+
+ aws_kms:
+ region: us-east-1
+ key_alias_prefix: stellaops/
+
+ azure_keyvault:
+ vault_url: https://stellaops.vault.azure.net/
+
+ gcp_kms:
+ project: stellaops-prod
+ location: global
+ key_ring: attestation-keys
+
+ # Regional compliance overrides
+ compliance:
+ ru:
+ provider: pkcs11
+ algorithms: [GOST-R-34.10-2012-256, GOST-R-34.10-2012-512]
+ library_path: /opt/cprocsp/lib/amd64/libcapi20.so
+ cn:
+ provider: pkcs11
+ algorithms: [SM2]
+```
+
+## 3. JWKS Export Requirements
+
+### 3.1 JWKS Endpoint
+
+The Authority service MUST expose a JWKS endpoint for distributed verification:
+
+```
+GET /.well-known/jwks.json
+```
+
+Response format:
+
+```json
+{
+ "keys": [
+ {
+ "kty": "EC",
+ "crv": "P-256",
+ "x": "base64url-encoded-x",
+ "y": "base64url-encoded-y",
+ "kid": "attestation-key-001",
+ "alg": "ES256",
+ "use": "sig",
+ "key_ops": ["verify"],
+ "x5t#S256": "sha256-fingerprint"
+ }
+ ]
+}
+```
+
+### 3.2 Key Rotation
+
+When keys are rotated:
+1. New key becomes `Active`, old key becomes `Disabled` (verification-only)
+2. JWKS includes both keys during transition period
+3. Old key removed after `rotation_grace_period` (default: 7 days)
+4. All consuming services refresh JWKS on schedule or via webhook (a consumer-side sketch follows below)
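+
+A minimal consumer-side sketch of that refresh behaviour, assuming a simple kid-indexed cache; the `JwksCache` type and single-refresh policy are illustrative, not part of the contract:
+
+```csharp
+using System.Linq;
+using System.Net.Http;
+using System.Text.Json;
+
+// Illustrative kid-indexed JWKS cache: refreshes from the Authority endpoint
+// when an unknown kid is seen, covering the rotation window in which both
+// the old and the new key are published.
+public sealed class JwksCache(HttpClient http, Uri jwksUri)
+{
+    private Dictionary<string, JsonElement> _keys = new();
+
+    public async Task<JsonElement?> GetKeyAsync(string kid, CancellationToken ct = default)
+    {
+        if (_keys.TryGetValue(kid, out var cached))
+            return cached;
+
+        // Unknown kid: the key may have just rotated, so refresh once.
+        using var doc = JsonDocument.Parse(await http.GetStringAsync(jwksUri, ct));
+        _keys = doc.RootElement.GetProperty("keys")
+            .EnumerateArray()
+            .ToDictionary(k => k.GetProperty("kid").GetString()!, k => k.Clone());
+
+        return _keys.TryGetValue(kid, out var refreshed) ? refreshed : null;
+    }
+}
+```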
+
+### 3.3 Key Discovery Flow
+
+```
+┌──────────┐ ┌──────────┐ ┌──────────┐
+│ Scanner │ │ Authority │ │ Attestor │
+└────┬─────┘ └────┬─────┘ └────┬─────┘
+ │ │ │
+ │ GET /jwks.json│ │
+ │───────────────>│ │
+ │<───────────────│ │
+ │ JWKS │ │
+ │ │ │
+ │ Sign(SBOM) │ │
+ │───────────────>│ │
+ │<───────────────│ │
+ │ Signature │ │
+ │ │ │
+ │ │ GET /jwks.json │
+ │ │<────────────────│
+ │ │────────────────>│
+ │ │ JWKS │
+ │ │ │
+ │ │ Verify(SBOM) │
+ │ │<────────────────│
+ │ │ ✓ Valid │
+```
+
+## 4. Provider Registration
+
+### 4.1 Service Registration
+
+```csharp
+// Program.cs
+services.AddAuthoritySigningProvider(options =>
+{
+ options.Provider = configuration["Crypto:Provider"];
+ options.Configuration = configuration.GetSection("Crypto");
+});
+
+// Extension method
+public static IServiceCollection AddAuthoritySigningProvider(
+ this IServiceCollection services,
+ Action<CryptoProviderOptions> configure)
+{
+ var options = new CryptoProviderOptions();
+ configure(options);
+
+ return options.Provider switch
+ {
+ "software" => services.AddSingleton(),
+ "pkcs11" => services.AddSingleton(),
+ "aws-kms" => services.AddSingleton(),
+ "gcp-kms" => services.AddSingleton(),
+ "azure-keyvault" => services.AddSingleton(),
+ _ => throw new ArgumentException($"Unknown provider: {options.Provider}")
+ };
+}
+```
+
+### 4.2 Regional Provider Registry
+
+For multi-region deployments with compliance requirements:
+
+```yaml
+# Regional key registry
+key_registry:
+ attestation-sbom:
+ default:
+ key_id: "stellaops/attestation-sbom-001"
+ algorithm: ES256
+ provider: aws-kms
+ ru:
+ key_id: "ru/attestation-sbom-gost"
+ algorithm: GOST-R-34.10-2012-256
+ provider: pkcs11
+ cn:
+ key_id: "cn/attestation-sbom-sm2"
+ algorithm: SM2
+ provider: pkcs11
+```
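+
+A sketch of how a caller might resolve a registry entry for a deployment region, falling back to `default` when no regional override exists; the `KeyRegistryEntry` type and lookup helper are illustrative:
+
+```csharp
+// Illustrative lookup: a region-specific entry wins; otherwise fall back to 'default'.
+public sealed record KeyRegistryEntry(string KeyId, string Algorithm, string Provider);
+
+public static class KeyRegistry
+{
+    public static KeyRegistryEntry Resolve(
+        IReadOnlyDictionary<string, KeyRegistryEntry> entriesByRegion, // e.g. for "attestation-sbom"
+        string? region)
+    {
+        // e.g. region "ru" resolves to the GOST key served via pkcs11.
+        if (region is not null && entriesByRegion.TryGetValue(region, out var regional))
+            return regional;
+
+        // Assumes a 'default' entry is always present, per the registry layout above.
+        return entriesByRegion["default"];
+    }
+}
+```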
+
+## 5. Error Codes
+
+| Code | Name | Description |
+|------|------|-------------|
+| `CRYPTO_001` | `KEY_NOT_FOUND` | Requested key does not exist |
+| `CRYPTO_002` | `KEY_DISABLED` | Key is disabled and cannot sign |
+| `CRYPTO_003` | `ALGORITHM_UNSUPPORTED` | Algorithm not supported by provider |
+| `CRYPTO_004` | `HSM_UNAVAILABLE` | HSM/PKCS#11 device not available |
+| `CRYPTO_005` | `SIGNATURE_FAILED` | Signing operation failed |
+| `CRYPTO_006` | `VERIFICATION_FAILED` | Signature verification failed |
+| `CRYPTO_007` | `KEY_EXPIRED` | Key has expired |
+| `CRYPTO_008` | `COMPLIANCE_VIOLATION` | Algorithm not allowed by compliance profile |
+
+## 6. Tasks Unblocked
+
+This contract unblocks:
+
+| Task ID | Description | Status |
+|---------|-------------|--------|
+| AUTH-CRYPTO-90-001 | Authority signing provider contract | ✅ UNBLOCKED |
+| SEC-CRYPTO-90-014 | Security Guild crypto integration | ✅ UNBLOCKED |
+| SCANNER-CRYPTO-90-001 | Scanner SBOM signing | ✅ UNBLOCKED |
+| ATTESTOR-CRYPTO-90-001 | Attestor DSSE signing | ✅ UNBLOCKED |
+
+## 7. Changelog
+
+| Date | Version | Change |
+|------|---------|--------|
+| 2025-12-06 | 1.0.0 | Initial contract with interface, algorithms, JWKS, regional support |
diff --git a/docs/contracts/sealed-install-enforcement.md b/docs/contracts/sealed-install-enforcement.md
new file mode 100644
index 000000000..b26ded9c9
--- /dev/null
+++ b/docs/contracts/sealed-install-enforcement.md
@@ -0,0 +1,425 @@
+# Sealed Install Enforcement Contract
+
+> **Status:** APPROVED
+> **Version:** 1.0.0
+> **Last Updated:** 2025-12-06
+> **Owner:** AirGap Controller Guild
+> **Unblocks:** TASKRUN-AIRGAP-57-001, TASKRUN-AIRGAP-58-001
+
+## Overview
+
+This contract defines the sealed install enforcement semantics for StellaOps air-gapped deployments. When a pack or task declares `sealed_install: true`, the Task Runner MUST refuse to execute if the environment is not properly sealed.
+
+## Architecture
+
+```
+┌─────────────────────────────────────────────────────────────────────────────┐
+│ Sealed Install Enforcement Flow │
+├─────────────────────────────────────────────────────────────────────────────┤
+│ │
+│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
+│ │ Task Pack │ │ Task Runner │ │ AirGap │ │
+│ │ │────>│ │────>│ Controller │ │
+│ │ sealed_ │ │ Enforcement │ │ │ │
+│ │ install:true │ │ Check │ │ /status │ │
+│ └──────────────┘ └──────────────┘ └──────────────┘ │
+│ │ │ │
+│ ▼ ▼ │
+│ ┌──────────────────────────────────┐ │
+│ │ Decision Matrix │ │
+│ │ │ │
+│ │ Pack: sealed Env: sealed │ │
+│ │ ────────────── ──────────── │ │
+│ │ true true → RUN │ │
+│ │ true false → DENY │ │
+│ │ false true → RUN │ │
+│ │ false false → RUN │ │
+│ └──────────────────────────────────┘ │
+│ │
+└─────────────────────────────────────────────────────────────────────────────┘
+```
+
+## 1. Pack Declaration
+
+### 1.1 Sealed Install Flag
+
+Packs declare their sealed requirement in the pack manifest:
+
+```json
+{
+ "pack_id": "compliance-scan-airgap",
+ "version": "1.0.0",
+ "name": "Air-Gap Compliance Scanner",
+ "sealed_install": true,
+ "sealed_requirements": {
+ "min_bundle_version": "2025.10.0",
+ "max_advisory_staleness_hours": 168,
+ "require_time_anchor": true,
+ "allowed_offline_duration_hours": 720
+ }
+}
+```
+
+### 1.2 Sealed Requirements Schema
+
+```json
+{
+ "type": "object",
+ "properties": {
+ "sealed_install": {
+ "type": "boolean",
+ "default": false,
+ "description": "If true, pack MUST run in sealed environment"
+ },
+ "sealed_requirements": {
+ "type": "object",
+ "properties": {
+ "min_bundle_version": {
+ "type": "string",
+ "description": "Minimum air-gap bundle version"
+ },
+ "max_advisory_staleness_hours": {
+ "type": "integer",
+ "minimum": 1,
+ "default": 168,
+ "description": "Maximum age of advisory data in hours"
+ },
+ "require_time_anchor": {
+ "type": "boolean",
+ "default": true,
+ "description": "Require valid time anchor"
+ },
+ "allowed_offline_duration_hours": {
+ "type": "integer",
+ "minimum": 1,
+ "default": 720,
+ "description": "Maximum allowed offline duration"
+ },
+ "require_signature_verification": {
+ "type": "boolean",
+ "default": true,
+ "description": "Require bundle signature verification"
+ }
+ }
+ }
+ }
+}
+```
+
+## 2. Environment Detection
+
+### 2.1 Sealed Mode Status API
+
+The Task Runner queries the AirGap Controller to determine sealed status:
+
+```
+GET /api/v1/airgap/status
+```
+
+Response:
+
+```json
+{
+ "sealed": true,
+ "mode": "sealed",
+ "sealed_at": "2025-12-01T00:00:00Z",
+ "sealed_by": "ops-admin@company.com",
+ "bundle_version": "2025.10.0",
+ "bundle_digest": "sha256:abc123...",
+ "last_advisory_update": "2025-12-01T00:00:00Z",
+ "advisory_staleness_hours": 120,
+ "time_anchor": {
+ "timestamp": "2025-12-01T00:00:00Z",
+ "signature": "base64...",
+ "valid": true,
+ "expires_at": "2025-12-31T00:00:00Z"
+ },
+ "egress_blocked": true,
+ "network_policy": "deny-all"
+}
+```
+
+### 2.2 Detection Heuristics
+
+If the AirGap Controller is unavailable, the Task Runner uses fallback heuristics:
+
+| Heuristic | Weight | Indicates |
+|-----------|--------|-----------|
+| No external DNS resolution | High | Sealed |
+| Blocked ports 80, 443 | High | Sealed |
+| AIRGAP_MODE=sealed env var | High | Sealed |
+| /etc/stellaops/sealed file exists | Medium | Sealed |
+| No internet connectivity | Medium | Sealed |
+| Local-only registry configured | Low | Sealed |
+
+Combined heuristic score threshold: **0.7** to consider environment sealed.
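+
+A sketch of the combined scoring; the table fixes only the tiers and the 0.7 threshold, so the numeric weights below are assumptions for illustration:
+
+```csharp
+// Illustrative scoring: the table above defines the tiers (High/Medium/Low),
+// but the numeric weights here are assumed for the sketch.
+public static class SealedHeuristics
+{
+    private const double High = 0.30, Medium = 0.15, Low = 0.05;
+    private const double Threshold = 0.7; // from section 2.2
+
+    public static bool LooksSealed(
+        bool noExternalDns,
+        bool ports80And443Blocked,
+        bool airgapModeEnvSet,
+        bool sealedMarkerFileExists,
+        bool noInternetConnectivity,
+        bool localOnlyRegistry)
+    {
+        double score =
+            (noExternalDns ? High : 0) +
+            (ports80And443Blocked ? High : 0) +
+            (airgapModeEnvSet ? High : 0) +
+            (sealedMarkerFileExists ? Medium : 0) +
+            (noInternetConnectivity ? Medium : 0) +
+            (localOnlyRegistry ? Low : 0);
+
+        return score >= Threshold;
+    }
+}
+```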
+
+## 3. Enforcement Logic
+
+### 3.1 Pre-Execution Check
+
+```csharp
+public sealed class SealedInstallEnforcer
+{
+ public async Task<EnforcementResult> EnforceAsync(
+ TaskPack pack,
+ CancellationToken ct = default)
+ {
+ // If pack doesn't require sealed install, allow
+ if (!pack.SealedInstall)
+ {
+ return EnforcementResult.Allowed("Pack does not require sealed install");
+ }
+
+ // Get environment sealed status
+ var status = await _airgapController.GetStatusAsync(ct);
+
+ // Core check: environment must be sealed
+ if (!status.Sealed)
+ {
+ return EnforcementResult.Denied(
+ "SEALED_INSTALL_VIOLATION",
+ "Pack requires sealed environment but environment is not sealed",
+ new SealedInstallViolation
+ {
+ PackId = pack.PackId,
+ RequiredSealed = true,
+ ActualSealed = false,
+ Recommendation = "Activate sealed mode with: stella airgap seal"
+ });
+ }
+
+ // Check sealed requirements
+ if (pack.SealedRequirements != null)
+ {
+ var violations = ValidateRequirements(pack.SealedRequirements, status);
+ if (violations.Any())
+ {
+ return EnforcementResult.Denied(
+ "SEALED_REQUIREMENTS_VIOLATION",
+ "Sealed requirements not met",
+ violations);
+ }
+ }
+
+ return EnforcementResult.Allowed("Sealed install requirements satisfied");
+ }
+
+ private List<RequirementViolation> ValidateRequirements(
+ SealedRequirements requirements,
+ SealedModeStatus status)
+ {
+ var violations = new List<RequirementViolation>();
+
+ // Bundle version check
+ if (requirements.MinBundleVersion != null)
+ {
+ if (Version.Parse(status.BundleVersion) < Version.Parse(requirements.MinBundleVersion))
+ {
+ violations.Add(new RequirementViolation
+ {
+ Requirement = "min_bundle_version",
+ Expected = requirements.MinBundleVersion,
+ Actual = status.BundleVersion,
+ Message = $"Bundle version {status.BundleVersion} < required {requirements.MinBundleVersion}"
+ });
+ }
+ }
+
+ // Advisory staleness check
+ if (status.AdvisoryStalenessHours > requirements.MaxAdvisoryStalenessHours)
+ {
+ violations.Add(new RequirementViolation
+ {
+ Requirement = "max_advisory_staleness_hours",
+ Expected = requirements.MaxAdvisoryStalenessHours.ToString(),
+ Actual = status.AdvisoryStalenessHours.ToString(),
+ Message = $"Advisory data is {status.AdvisoryStalenessHours}h old, max allowed is {requirements.MaxAdvisoryStalenessHours}h"
+ });
+ }
+
+ // Time anchor check
+ if (requirements.RequireTimeAnchor && (status.TimeAnchor == null || !status.TimeAnchor.Valid))
+ {
+ violations.Add(new RequirementViolation
+ {
+ Requirement = "require_time_anchor",
+ Expected = "valid time anchor",
+ Actual = status.TimeAnchor?.Valid.ToString() ?? "missing",
+ Message = "Valid time anchor required but not present"
+ });
+ }
+
+ return violations;
+ }
+}
+```
+
+### 3.2 Decision Matrix
+
+| Pack `sealed_install` | Environment Sealed | Bundle Valid | Advisories Fresh | Result |
+|-----------------------|-------------------|--------------|------------------|--------|
+| `true` | `true` | `true` | `true` | ✅ RUN |
+| `true` | `true` | `true` | `false` | ⚠️ WARN + RUN (if within grace) |
+| `true` | `true` | `false` | * | ❌ DENY |
+| `true` | `false` | * | * | ❌ DENY |
+| `false` | `true` | * | * | ✅ RUN |
+| `false` | `false` | * | * | ✅ RUN |
+
+### 3.3 Grace Period Handling
+
+For advisory staleness, a grace period can be configured:
+
+```yaml
+# etc/taskrunner.yaml
+enforcement:
+ sealed_install:
+ staleness_grace_period_hours: 24
+ staleness_warning_threshold_hours: 120
+ deny_on_staleness: true # or false for warn-only
+```
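+
+A sketch of how these settings interact with the decision-matrix row 'WARN + RUN (if within grace)'; the enum and decision flow below are illustrative, not part of the contract:
+
+```csharp
+public enum StalenessDecision { Run, WarnAndRun, Deny }
+
+public static class StalenessPolicy
+{
+    // Maps advisory staleness to a decision using the settings above.
+    public static StalenessDecision Evaluate(
+        int stalenessHours,
+        int maxStalenessHours,        // pack's max_advisory_staleness_hours
+        int gracePeriodHours,         // staleness_grace_period_hours
+        int warningThresholdHours,    // staleness_warning_threshold_hours
+        bool denyOnStaleness)         // deny_on_staleness
+    {
+        // Beyond max + grace: deny (or warn-only if so configured).
+        if (stalenessHours > maxStalenessHours + gracePeriodHours)
+            return denyOnStaleness ? StalenessDecision.Deny : StalenessDecision.WarnAndRun;
+
+        // Over the pack max but within grace, or past the warning threshold: warn.
+        if (stalenessHours > maxStalenessHours || stalenessHours > warningThresholdHours)
+            return StalenessDecision.WarnAndRun;
+
+        return StalenessDecision.Run;
+    }
+}
+```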
+
+## 4. Refusal Semantics
+
+### 4.1 Error Response
+
+When enforcement denies execution:
+
+```json
+{
+ "error": {
+ "code": "SEALED_INSTALL_VIOLATION",
+ "message": "Pack requires sealed environment but environment is not sealed",
+ "details": {
+ "pack_id": "compliance-scan-airgap",
+ "pack_version": "1.0.0",
+ "sealed_install_required": true,
+ "environment_sealed": false,
+ "violations": [],
+ "recommendation": "Activate sealed mode with: stella airgap seal"
+ }
+ },
+ "status": "rejected",
+ "rejected_at": "2025-12-06T10:00:00Z"
+}
+```
+
+### 4.2 CLI Exit Codes
+
+| Code | Name | Description |
+|------|------|-------------|
+| 40 | `SEALED_INSTALL_VIOLATION` | Pack requires sealed but environment is not |
+| 41 | `BUNDLE_VERSION_VIOLATION` | Bundle version below minimum |
+| 42 | `ADVISORY_STALENESS_VIOLATION` | Advisory data too stale |
+| 43 | `TIME_ANCHOR_VIOLATION` | Time anchor missing or invalid |
+| 44 | `SIGNATURE_VERIFICATION_VIOLATION` | Bundle signature verification failed |
+
+### 4.3 Audit Logging
+
+All enforcement decisions are logged:
+
+```json
+{
+ "event_type": "sealed_install_enforcement",
+ "timestamp": "2025-12-06T10:00:00Z",
+ "pack_id": "compliance-scan-airgap",
+ "pack_version": "1.0.0",
+ "decision": "denied",
+ "reason": "SEALED_INSTALL_VIOLATION",
+ "environment": {
+ "sealed": false,
+ "bundle_version": null,
+ "advisory_staleness_hours": null
+ },
+ "user": "task-runner-service",
+ "tenant_id": "550e8400-e29b-41d4-a716-446655440000"
+}
+```
+
+## 5. Integration Points
+
+### 5.1 Task Runner Integration
+
+```csharp
+// In TaskRunner execution pipeline
+public async Task<TaskResult> ExecuteAsync(TaskPack pack, TaskContext context)
+{
+ // Pre-execution enforcement
+ var enforcement = await _sealedInstallEnforcer.EnforceAsync(pack);
+ if (!enforcement.Allowed)
+ {
+ await _auditLogger.LogEnforcementDenialAsync(pack, enforcement);
+ return TaskResult.Rejected(enforcement);
+ }
+
+ // Continue with execution
+ return await _executor.ExecuteAsync(pack, context);
+}
+```
+
+### 5.2 CLI Integration
+
+```bash
+# Check sealed status before running pack
+$ stella pack run compliance-scan-airgap
+
+Error: Sealed install violation
+ Pack 'compliance-scan-airgap' requires a sealed environment.
+
+ Current environment:
+ Sealed: false
+
+ To resolve:
+ 1. Import an air-gap bundle: stella airgap import
+ 2. Activate sealed mode: stella airgap seal
+ 3. Verify status: stella airgap status
+
+ Exit code: 40
+```
+
+## 6. Configuration
+
+### 6.1 Task Runner Configuration
+
+```yaml
+# etc/taskrunner.yaml
+enforcement:
+ sealed_install:
+ enabled: true
+
+ # Staleness handling
+ staleness_grace_period_hours: 24
+ staleness_warning_threshold_hours: 120
+ deny_on_staleness: true
+
+ # Fallback detection
+ use_heuristic_detection: true
+ heuristic_threshold: 0.7
+
+ # Logging
+ log_all_decisions: true
+ audit_retention_days: 365
+```
+
+### 6.2 Environment Variables
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `AIRGAP_MODE` | Force sealed mode detection | — |
+| `AIRGAP_CONTROLLER_URL` | AirGap controller endpoint | `http://localhost:8080` |
+| `SEALED_INSTALL_BYPASS` | Bypass enforcement (dev only) | `false` |
+
+## 7. Tasks Unblocked
+
+This contract unblocks:
+
+| Task ID | Description | Status |
+|---------|-------------|--------|
+| TASKRUN-AIRGAP-57-001 | Sealed install enforcement contract | ✅ UNBLOCKED |
+| TASKRUN-AIRGAP-58-001 | Sealed install CLI integration | ✅ UNBLOCKED |
+
+## 8. Changelog
+
+| Date | Version | Change |
+|------|---------|--------|
+| 2025-12-06 | 1.0.0 | Initial contract with enforcement logic, decision matrix, CLI integration |
diff --git a/docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md b/docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md
index 2caa1958e..655afb13d 100644
--- a/docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md
+++ b/docs/implplan/SPRINT_0139_0001_0001_scanner_bun.md
@@ -38,28 +38,28 @@
## Delivery Tracker
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- |
-| P1 | PREP-SCANNER-BUN-001-DESIGN-DOC | TODO | Due TBD · Accountable: Bun Analyzer Guild | Bun Analyzer Guild | Document Bun analyzer design at `docs/modules/scanner/prep/bun-analyzer-design.md` covering lockfile schema, discovery heuristics, evidence model, and CLI/WebService contract. |
-| 1 | SCANNER-BUN-001 | TODO | Await P1 | Bun Analyzer Guild | Create project scaffold: `StellaOps.Scanner.Analyzers.Lang.Bun.csproj`, plugin manifest (`manifest.json`), and `BunAnalyzerPlugin` implementing `ILanguageAnalyzerPlugin`. |
-| 2 | SCANNER-BUN-002 | TODO | Depends on task 1 | Bun Analyzer Guild | Implement `BunProjectDiscoverer`: identify candidate roots by presence of `package.json` + (`bun.lock` \| `bun.lockb` \| `bunfig.toml` \| `node_modules/.bun/`). |
-| 3 | SCANNER-BUN-003 | TODO | Depends on task 2 | Bun Analyzer Guild | Implement `BunInputNormalizer`: classify each root as installed-path (node_modules exists) or lockfile-path (`bun.lock` only) or unsupported (`bun.lockb` only). |
-| 4 | SCANNER-BUN-004 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `bun.lockb` unsupported handler: emit remediation finding with migration command (`bun install --save-text-lockfile`). |
-| 5 | SCANNER-BUN-005 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `BunLockParser`: tolerant JSONC parser for `bun.lock` text format; extract (name, version, resolved, integrity) tuples. |
-| 6 | SCANNER-BUN-006 | TODO | Depends on task 5 | Bun Analyzer Guild | Implement dev/prod dependency filtering for lockfile path; expose `include_dev` configuration option. |
-| 7 | SCANNER-BUN-007 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `BunInstalledCollector`: traverse `node_modules/**/package.json` and `node_modules/.bun/**/package.json` with symlink-safe walker. |
-| 8 | SCANNER-BUN-008 | TODO | Depends on task 7 | Bun Analyzer Guild | Implement symlink safety: follow symlinks only within root prefix; maintain visited inode/realpath set; record logical + real paths for evidence. |
-| 9 | SCANNER-BUN-009 | TODO | Depends on task 7 | Bun Analyzer Guild | Extract package metadata from `package.json`: name, version, private flag; attach lockfile evidence (resolved, integrity) when available. |
-| 10 | SCANNER-BUN-010 | TODO | Depends on tasks 5, 9 | Bun Analyzer Guild | Implement `BunPackageNormalizer`: deduplicate by (name, version); accumulate occurrence paths for traceability; emit `LanguageComponentRecord`. |
-| 11 | SCANNER-BUN-011 | TODO | Depends on task 10 | Bun Analyzer Guild | PURL generation: emit `pkg:npm/<name>@<version>` with correct scoped-package encoding (`@scope/pkg` → `%40scope/pkg`). |
-| 12 | SCANNER-BUN-012 | TODO | Depends on task 10 | Bun Analyzer Guild | Evidence emission: attach `LanguageComponentEvidence` with kind (File/Metadata), source (`node_modules`/`bun.lock`), locator (path), and optional sha256. |
-| 13 | SCANNER-BUN-013 | TODO | Depends on task 12 | Bun Analyzer Guild | Assemble `BunLanguageAnalyzer` orchestrating discovery → input normalization → collection → normalization → emit via `LanguageComponentWriter`. |
-| 14 | SCANNER-BUN-014 | TODO | Depends on task 13 | Bun Analyzer Guild | Performance guards: implement max-files-per-root cap, max-symlink-depth limit, prefix pruning to avoid full image traversal. |
-| 15 | SCANNER-BUN-015 | TODO | Depends on task 13 | QA Guild | Create test project `StellaOps.Scanner.Analyzers.Lang.Bun.Tests` with golden fixture harness using `LanguageAnalyzerTestHarness.AssertDeterministicAsync`. |
-| 16 | SCANNER-BUN-016 | TODO | Depends on task 15 | QA Guild | Fixture: Standard Bun install (hoisted/default linker) with `node_modules` and `bun.lock`; verify installed inventory path. |
-| 17 | SCANNER-BUN-017 | TODO | Depends on task 15 | QA Guild | Fixture: Isolated linker install (`bun install --linker isolated`) with packages under `node_modules/.bun/`; verify `.bun/` traversal. |
-| 18 | SCANNER-BUN-018 | TODO | Depends on task 15 | QA Guild | Fixture: Lockfile-only image (no `node_modules`); verify lockfile inventory path and dev/prod filtering. |
-| 19 | SCANNER-BUN-019 | TODO | Depends on task 15 | QA Guild | Fixture: Binary lockfile only (`bun.lockb`); verify unsupported remediation message emitted. |
-| 20 | SCANNER-BUN-020 | TODO | Depends on task 15 | QA Guild | Fixture: Monorepo/workspaces with multiple `package.json` under single lock; verify workspace member handling. |
-| 21 | SCANNER-BUN-021 | TODO | Depends on task 15 | QA Guild | Fixture: Symlink corner cases (verify no traversal outside root, no infinite loops, both logical/real paths in evidence). |
+| P1 | PREP-SCANNER-BUN-001-DESIGN-DOC | DONE (2025-12-06) | Design doc at `docs/modules/scanner/prep/bun-analyzer-design.md` | Bun Analyzer Guild | Document Bun analyzer design at `docs/modules/scanner/prep/bun-analyzer-design.md` covering lockfile schema, discovery heuristics, evidence model, and CLI/WebService contract. |
+| 1 | SCANNER-BUN-001 | DONE (2025-12-06) | Scaffold at `StellaOps.Scanner.Analyzers.Lang.Bun` | Bun Analyzer Guild | Create project scaffold: `StellaOps.Scanner.Analyzers.Lang.Bun.csproj`, plugin manifest (`manifest.json`), and `BunAnalyzerPlugin` implementing `ILanguageAnalyzerPlugin`. |
+| 2 | SCANNER-BUN-002 | DONE (2025-12-06) | `BunProjectDiscoverer.cs` implemented | Bun Analyzer Guild | Implement `BunProjectDiscoverer`: identify candidate roots by presence of `package.json` + (`bun.lock` \| `bun.lockb` \| `bunfig.toml` \| `node_modules/.bun/`). |
+| 3 | SCANNER-BUN-003 | DONE (2025-12-06) | `BunInputNormalizer.cs` implemented | Bun Analyzer Guild | Implement `BunInputNormalizer`: classify each root as installed-path (node_modules exists) or lockfile-path (`bun.lock` only) or unsupported (`bun.lockb` only). |
+| 4 | SCANNER-BUN-004 | DONE (2025-12-06) | `EmitBinaryLockfileRemediation` in BunLanguageAnalyzer | Bun Analyzer Guild | Implement `bun.lockb` unsupported handler: emit remediation finding with migration command (`bun install --save-text-lockfile`). |
+| 5 | SCANNER-BUN-005 | DONE (2025-12-06) | `BunLockParser.cs` with JSONC support | Bun Analyzer Guild | Implement `BunLockParser`: tolerant JSONC parser for `bun.lock` text format; extract (name, version, resolved, integrity) tuples. |
+| 6 | SCANNER-BUN-006 | DONE (2025-12-06) | `IncludeDev` in BunInputClassification | Bun Analyzer Guild | Implement dev/prod dependency filtering for lockfile path; expose `include_dev` configuration option. |
+| 7 | SCANNER-BUN-007 | DONE (2025-12-06) | `BunInstalledCollector.cs` implemented | Bun Analyzer Guild | Implement `BunInstalledCollector`: traverse `node_modules/**/package.json` and `node_modules/.bun/**/package.json` with symlink-safe walker. |
+| 8 | SCANNER-BUN-008 | DONE (2025-12-06) | Symlink safety in BunInstalledCollector | Bun Analyzer Guild | Implement symlink safety: follow symlinks only within root prefix; maintain visited inode/realpath set; record logical + real paths for evidence. |
+| 9 | SCANNER-BUN-009 | DONE (2025-12-06) | `TryParsePackage` in BunInstalledCollector | Bun Analyzer Guild | Extract package metadata from `package.json`: name, version, private flag; attach lockfile evidence (resolved, integrity) when available. |
+| 10 | SCANNER-BUN-010 | DONE (2025-12-06) | `BunPackageNormalizer.cs` implemented | Bun Analyzer Guild | Implement `BunPackageNormalizer`: deduplicate by (name, version); accumulate occurrence paths for traceability; emit `LanguageComponentRecord`. |
+| 11 | SCANNER-BUN-011 | DONE (2025-12-06) | `BuildPurl` in BunPackage | Bun Analyzer Guild | PURL generation: emit `pkg:npm/<name>@<version>` with correct scoped-package encoding (`@scope/pkg` → `%40scope/pkg`). |
+| 12 | SCANNER-BUN-012 | DONE (2025-12-06) | `CreateEvidence` in BunPackage | Bun Analyzer Guild | Evidence emission: attach `LanguageComponentEvidence` with kind (File/Metadata), source (`node_modules`/`bun.lock`), locator (path), and optional sha256. |
+| 13 | SCANNER-BUN-013 | DONE (2025-12-06) | `BunLanguageAnalyzer.cs` orchestration complete | Bun Analyzer Guild | Assemble `BunLanguageAnalyzer` orchestrating discovery → input normalization → collection → normalization → emit via `LanguageComponentWriter`. |
+| 14 | SCANNER-BUN-014 | DONE (2025-12-06) | MaxFilesPerRoot/MaxSymlinkDepth guards in place | Bun Analyzer Guild | Performance guards: implement max-files-per-root cap, max-symlink-depth limit, prefix pruning to avoid full image traversal. |
+| 15 | SCANNER-BUN-015 | DONE (2025-12-06) | Test project with 6 test methods | QA Guild | Create test project `StellaOps.Scanner.Analyzers.Lang.Bun.Tests` with golden fixture harness using `LanguageAnalyzerTestHarness.AssertDeterministicAsync`. |
+| 16 | SCANNER-BUN-016 | DONE (2025-12-06) | `StandardInstallProducesDeterministicOutputAsync` test | QA Guild | Fixture: Standard Bun install (hoisted/default linker) with `node_modules` and `bun.lock`; verify installed inventory path. |
+| 17 | SCANNER-BUN-017 | DONE (2025-12-06) | `IsolatedLinkerInstallIsParsedAsync` test | QA Guild | Fixture: Isolated linker install (`bun install --linker isolated`) with packages under `node_modules/.bun/`; verify `.bun/` traversal. |
+| 18 | SCANNER-BUN-018 | DONE (2025-12-06) | `LockfileOnlyIsParsedAsync` test | QA Guild | Fixture: Lockfile-only image (no `node_modules`); verify lockfile inventory path and dev/prod filtering. |
+| 19 | SCANNER-BUN-019 | DONE (2025-12-06) | `BinaryLockfileEmitsRemediationAsync` test | QA Guild | Fixture: Binary lockfile only (`bun.lockb`); verify unsupported remediation message emitted. |
+| 20 | SCANNER-BUN-020 | DONE (2025-12-06) | `WorkspacesAreParsedAsync` test | QA Guild | Fixture: Monorepo/workspaces with multiple `package.json` under single lock; verify workspace member handling. |
+| 21 | SCANNER-BUN-021 | DONE (2025-12-06) | `SymlinkSafetyIsEnforcedAsync` test | QA Guild | Fixture: Symlink corner cases (verify no traversal outside root, no infinite loops, both logical/real paths in evidence). |
| 22 | SCANNER-BUN-022 | TODO | Depends on task 14 | CLI Guild | Implement `stellaops-cli bun inspect` verb: display Bun package inventory for local root or scan ID; wire into `CommandFactory`. |
| 23 | SCANNER-BUN-023 | TODO | Depends on task 22 | CLI Guild | Implement `stellaops-cli bun resolve` verb: resolve Bun packages by scan ID, digest, or image reference with JSON/table output. |
| 24 | SCANNER-BUN-024 | TODO | Depends on task 23 | CLI Guild | Add CLI unit tests for Bun verbs (`CommandFactoryTests`, JSON output assertions); update CLI help text and golden outputs. |
@@ -72,6 +72,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-06 | Completed P1 through 21 (Waves A–D): Created design doc at `docs/modules/scanner/prep/bun-analyzer-design.md`. Verified core analyzer implementation in `StellaOps.Scanner.Analyzers.Lang.Bun`: BunAnalyzerPlugin, BunLanguageAnalyzer, BunProjectDiscoverer, BunInputNormalizer, BunLockParser (JSONC with git/tarball/workspace source detection), BunInstalledCollector (symlink-safe), BunPackageNormalizer, BunPackage (PURL + evidence). Performance guards (MaxFilesPerRoot=50000, MaxSymlinkDepth=10) in place. Test project with 6 golden fixture tests. Build succeeds. | Implementer |
| 2025-12-05 | Sprint file created from product advisory; 29 tasks across 6 waves (A–F) covering core analyzer, testing, CLI/WebService/Worker integration, and docs. | Planning |
## Decisions & Risks
diff --git a/docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md b/docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md
index cfd85dad0..8810d99dd 100644
--- a/docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md
+++ b/docs/implplan/SPRINT_0157_0001_0001_taskrunner_i.md
@@ -25,9 +25,9 @@
| 2 | TASKRUN-AIRGAP-56-002 | DONE (2025-12-03) | Helper delivered; downstream AIRGAP-57/58 await controller/importer bundle specs. | Task Runner Guild · AirGap Importer Guild | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. |
| 3 | TASKRUN-AIRGAP-57-001 | BLOCKED (2025-11-30) | Depends on 56-002; awaiting sealed-install enforcement contract. | Task Runner Guild · AirGap Controller Guild | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. |
| 4 | TASKRUN-AIRGAP-58-001 | BLOCKED (2025-11-30) | Depends on 57-001. | Task Runner Guild · Evidence Locker Guild | Capture bundle import job transcripts, hashed inputs/outputs into portable evidence bundles. |
-| 5 | TASKRUN-42-001 | TODO | ✅ Control-flow contract at `docs/schemas/taskpack-control-flow.schema.json`; proceed with execution engine upgrades (loops/conditionals/maxParallel), simulation mode, policy gate integration, deterministic failure recovery. | Task Runner Guild (`src/TaskRunner/StellaOps.TaskRunner`) | Execution engine enhancements + simulation API/CLI. |
-| 6 | TASKRUN-OAS-61-001 | TODO | ✅ Control-flow contract published 2025-12-06; proceed with OAS freeze. | Task Runner Guild · API Contracts Guild | Document TaskRunner APIs (pack runs, logs, approvals) with streaming schemas/examples. |
-| 7 | TASKRUN-OAS-61-002 | TODO | Depends on 61-001; ready once OAS documented. | Task Runner Guild | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, ETag. |
+| 5 | TASKRUN-42-001 | DONE (2025-12-06) | Implemented Loop/Conditional step kinds, extended execution graph/simulation engine, added manifest/planner/validator support, 128 tests passing. | Task Runner Guild (`src/TaskRunner/StellaOps.TaskRunner`) | Execution engine enhancements + simulation API/CLI. |
+| 6 | TASKRUN-OAS-61-001 | DONE (2025-12-06) | Created `docs/api/taskrunner-openapi.yaml` with full API documentation including streaming logs (NDJSON), loop/conditional/policy gate schemas. | Task Runner Guild · API Contracts Guild | Document TaskRunner APIs (pack runs, logs, approvals) with streaming schemas/examples. |
+| 7 | TASKRUN-OAS-61-002 | TODO | ✅ 61-001 DONE; endpoint already implemented in Program.cs; needs signing integration. | Task Runner Guild | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, ETag. |
| 8 | TASKRUN-OAS-62-001 | TODO | Depends on 61-002. | Task Runner Guild · SDK Generator Guild | SDK examples for pack run lifecycle; streaming log helpers; paginator wrappers. |
| 9 | TASKRUN-OAS-63-001 | TODO | Depends on 62-001. | Task Runner Guild · API Governance Guild | Sunset/deprecation headers + notifications for legacy pack APIs. |
| 10 | TASKRUN-OBS-50-001 | DONE (2025-11-25) | Telemetry core adoption. | Task Runner Guild | Add telemetry core in host + worker; spans/logs include `trace_id`, `tenant_id`, `run_id`, scrubbed transcripts. |
@@ -56,6 +56,8 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-06 | TASKRUN-OAS-61-001 DONE: Created `docs/api/taskrunner-openapi.yaml` OpenAPI 3.1 specification documenting all TaskRunner WebService APIs: POST /v1/task-runner/simulations (simulate task pack), POST /v1/task-runner/runs (create run), GET /v1/task-runner/runs/{runId} (get state), GET /v1/task-runner/runs/{runId}/logs (NDJSON streaming), GET /v1/task-runner/runs/{runId}/artifacts (list artifacts), POST /v1/task-runner/runs/{runId}/approvals/{approvalId} (apply decision), POST /v1/task-runner/runs/{runId}/cancel (cancel run), GET /.well-known/openapi (metadata). Includes LoopInfo, ConditionalInfo, PolicyInfo schemas for new control-flow steps. Examples provided for all endpoints. | Implementer |
+| 2025-12-06 | TASKRUN-42-001 DONE: Extended `PackRunStepKind` enum with `Loop` and `Conditional`. Added `PackRunLoopConfig`, `PackRunConditionalConfig`, `PackRunPolicyGateConfig` record types to `PackRunExecutionGraph.cs`. Updated `PackRunExecutionGraphBuilder` to extract loop/conditional/policy gate configs. Extended `PackRunSimulationEngine` and `PackRunSimulationModels.cs` with `WillIterate`/`WillBranch` statuses and simulation info records. Added `TaskPackLoopStep`, `TaskPackConditionalStep` manifest models. Updated `TaskPackPlanner` with `BuildLoopStep`/`BuildConditionalStep` methods. Updated `TaskPackManifestValidator` for loop/conditional validation. Added 3 new simulation tests (loop, conditional, policy gate); 128 total tests passing. | Implementer |
| 2025-12-06 | TASKRUN-OBS-53-001 DONE: Created `PackRunEvidenceSnapshot.cs` domain model with Merkle root computation for hash chain integrity. Created `IPackRunEvidenceSnapshotService.cs` with service for capturing run completion, step execution, approval decisions, and policy evaluations. Created `IPackRunEvidenceStore.cs` with InMemoryPackRunEvidenceStore for testing. Created `IPackRunRedactionGuard.cs` with PackRunRedactionGuard for sensitive data redaction (bearer tokens, passwords, emails, identities). Added 29 comprehensive tests in `PackRunEvidenceSnapshotTests.cs`. Build verified (0 errors), all tests passing. | Implementer |
| 2025-12-06 | TASKRUN-OBS-52-001 DONE: Created `PackRunTimelineEvent.cs` domain model per timeline-event.schema.json with event types (pack.started, pack.step.completed, pack.failed, etc.). Created `PackRunTimelineEventEmitter.cs` with retry logic and deterministic batch ordering. Created `IPackRunTimelineEventSink.cs` with InMemoryPackRunTimelineEventSink for testing. Added 32 comprehensive tests in `PackRunTimelineEventTests.cs`. Build verified (0 errors), all tests passing. | Implementer |
| 2025-12-05 | **OBS Unblocked:** TASKRUN-OBS-52-001 and TASKRUN-OBS-53-001 changed from BLOCKED to TODO. Root blocker resolved: `timeline-event.schema.json` created 2025-12-04 per BLOCKED_DEPENDENCY_TREE.md Section 8.3. | Implementer |
diff --git a/docs/modules/scanner/prep/bun-analyzer-design.md b/docs/modules/scanner/prep/bun-analyzer-design.md
new file mode 100644
index 000000000..b526e5a85
--- /dev/null
+++ b/docs/modules/scanner/prep/bun-analyzer-design.md
@@ -0,0 +1,282 @@
+# Bun Analyzer Design — PREP-SCANNER-BUN-001-DESIGN-DOC
+
+Status: Draft (2025-12-06)
+Owners: Bun Analyzer Guild · Scanner Guild
+Scope: Bun package manager analyzer for npm-ecosystem vulnerability scanning in container filesystems.
+
+## Overview
+
+The Bun analyzer extracts npm-ecosystem package inventory from Bun-managed JavaScript/TypeScript projects. Bun consumes npm-compatible `package.json` manifests and installs packages into `node_modules`, making it similar to the Node analyzer but with distinct lockfile formats and installation structures.
+
+## Supported Artifacts
+
+### Lockfile Formats
+
+| Format | Extension | Status | Notes |
+|--------|-----------|--------|-------|
+| Text lockfile | `bun.lock` | Supported | JSONC format with package metadata |
+| Binary lockfile | `bun.lockb` | Unsupported | Undocumented binary format; emit migration guidance |
+
+### Installation Structures
+
+| Structure | Discovery Pattern | Notes |
+|-----------|-------------------|-------|
+| Default (hoisted) | `node_modules/**/package.json` | Standard flat structure |
+| Isolated linker | `node_modules/.bun/**/package.json` | Symlink-heavy, requires safe traversal |
+
+## Discovery Heuristics
+
+### Project Root Detection
+
+A directory is considered a Bun project root when:
+
+1. `package.json` exists, AND
+2. One or more of:
+ - `bun.lock` exists (text lockfile)
+ - `bun.lockb` exists (binary lockfile — triggers unsupported message)
+ - `bunfig.toml` exists (Bun configuration)
+ - `node_modules/.bun/` exists (isolated linker marker)
+
+### Input Classification
+
+```
+BunInputNormalizer classifies each root:
+├── InstalledPath: node_modules exists → traverse installed packages
+├── LockfilePath: bun.lock only (no node_modules) → parse lockfile
+└── Unsupported: bun.lockb only → emit remediation finding
+```
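+
+Taken together, root detection and input classification reduce to a small decision function. A minimal sketch, assuming hypothetical names (`BunRootKind`, `BunRootClassifier`); the actual `BunInputNormalizer` may differ:
+
+```csharp
+using System.IO;
+
+enum BunRootKind { NotBunRoot, InstalledPath, LockfilePath, Unsupported }
+
+static class BunRootClassifier
+{
+    // Sketch only: mirrors the heuristics above, not the shipped implementation.
+    public static BunRootKind Classify(string root)
+    {
+        if (!File.Exists(Path.Combine(root, "package.json")))
+            return BunRootKind.NotBunRoot;
+
+        bool textLock = File.Exists(Path.Combine(root, "bun.lock"));
+        bool binaryLock = File.Exists(Path.Combine(root, "bun.lockb"));
+        bool bunfig = File.Exists(Path.Combine(root, "bunfig.toml"));
+        bool isolated = Directory.Exists(Path.Combine(root, "node_modules", ".bun"));
+
+        if (!(textLock || binaryLock || bunfig || isolated))
+            return BunRootKind.NotBunRoot;
+
+        if (Directory.Exists(Path.Combine(root, "node_modules")))
+            return BunRootKind.InstalledPath;  // traverse installed packages
+        if (textLock)
+            return BunRootKind.LockfilePath;   // parse bun.lock only
+        if (binaryLock)
+            return BunRootKind.Unsupported;    // emit remediation finding
+        return BunRootKind.NotBunRoot;
+    }
+}
+```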
+
+## Lockfile Schema (`bun.lock`)
+
+The `bun.lock` text format is a JSONC variant (JSON with comments and trailing commas):
+
+```jsonc
+{
+ "lockfileVersion": 1,
+ "workspaces": {
+ "": {
+ "name": "my-app",
+ "dependencies": {
+ "lodash": "^4.17.21"
+ }
+ }
+ },
+ "packages": {
+ "lodash@4.17.21": {
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "integrity": "sha512-v2kDEe57...",
+ "dependencies": {}
+ }
+ }
+}
+```
+
+### Extracted Fields
+
+| Field | Source | Usage |
+|-------|--------|-------|
+| name | packages key (before the last `@`) | PURL name component |
+| version | packages key (after the last `@`) | PURL version component |
+| resolved | packages[].resolved | Evidence locator |
+| integrity | packages[].integrity | Evidence hash (sha512/sha256) |
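+
+Because scoped package names themselves begin with `@`, the `packages` key must be split at the last `@`, not the first. A minimal sketch with a hypothetical helper (the actual `BunLockParser` may differ):
+
+```csharp
+using System;
+
+// Splits "lodash@4.17.21" or "@types/node@20.10.0" into (name, version).
+static (string Name, string Version) SplitPackageKey(string key)
+{
+    int at = key.LastIndexOf('@');
+    if (at <= 0) // an '@' at index 0 is a scope marker, not a version separator
+        throw new FormatException($"Malformed package key: {key}");
+    return (key[..at], key[(at + 1)..]);
+}
+```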
+
+## Evidence Model
+
+### LanguageComponentEvidence Structure
+
+```csharp
+record BunPackageEvidence(
+ LanguageEvidenceKind Kind, // File | Metadata | Lockfile
+ string Source, // "node_modules" | "bun.lock"
+ string Locator, // File path or registry URL
+ string? Content, // package.json content (if File)
+ string? Sha256); // Content hash
+```
+
+### Evidence Collection Matrix
+
+| Source | Kind | Locator | Content | Hash |
+|--------|------|---------|---------|------|
+| `node_modules/**/package.json` | File | Relative path | JSON content | sha256 of content |
+| `bun.lock` | Lockfile | `bun.lock:packages[name@version]` | null | null |
+| Registry resolution | Metadata | resolved URL | null | integrity value |
+
+## PURL Generation
+
+Bun packages are npm packages carrying a `bun` package-manager qualifier:
+
+```
+pkg:npm/<name>@<version>?package_manager=bun
+```
+
+### Scoped Package Encoding
+
+| Raw Name | Encoded PURL |
+|----------|--------------|
+| `lodash` | `pkg:npm/lodash@4.17.21?package_manager=bun` |
+| `@types/node` | `pkg:npm/%40types/node@20.10.0?package_manager=bun` |
+| `@org/pkg` | `pkg:npm/%40org/pkg@1.0.0?package_manager=bun` |
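+
+A sketch of the encoding rule, assuming a hypothetical `ToBunPurl` helper: only the scope's leading `@` is percent-encoded, and the qualifier is appended verbatim.
+
+```csharp
+static string ToBunPurl(string name, string version)
+{
+    // Percent-encode the leading '@' of a scoped name per the purl spec.
+    string encoded = name.StartsWith('@') ? "%40" + name[1..] : name;
+    return $"pkg:npm/{encoded}@{version}?package_manager=bun";
+}
+
+// ToBunPurl("@types/node", "20.10.0")
+//   => "pkg:npm/%40types/node@20.10.0?package_manager=bun"
+```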
+
+## Symlink Safety
+
+Bun's isolated linker creates symlink-heavy structures. Safety requirements:
+
+1. **Prefix Containment**: Only follow symlinks that resolve within root path
+2. **Cycle Detection**: Maintain visited inode/realpath set
+3. **Path Recording**: Record both logical path (symlink) and real path (target)
+4. **Depth Limit**: Cap symlink depth at 32 levels (configurable)
+
+```csharp
+record SymlinkSafetyContext(
+ string RootPrefix,
+ HashSet<(long Inode, long Device)> VisitedInodes,
+ int MaxDepth = 32);
+```
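+
+A sketch of how the context could gate each traversal step, assuming the platform supplies inode/device identity (obtaining it in .NET requires interop); names are illustrative:
+
+```csharp
+using System;
+using System.IO;
+
+static bool ShouldFollow(
+    SymlinkSafetyContext ctx,
+    FileSystemInfo link,
+    (long Inode, long Device) id,
+    int depth)
+{
+    if (depth > ctx.MaxDepth)
+        return false;                          // depth limit (requirement 4)
+
+    var target = link.ResolveLinkTarget(returnFinalTarget: true);
+    if (target is null)
+        return false;                          // broken link
+
+    // Prefix containment (requirement 1): real path must stay under the root.
+    string real = Path.GetFullPath(target.FullName);
+    if (!real.StartsWith(ctx.RootPrefix, StringComparison.Ordinal))
+        return false;
+
+    // Cycle detection (requirement 2): visit each (inode, device) pair once.
+    return ctx.VisitedInodes.Add(id);
+}
+```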
+
+## Performance Guards
+
+| Guard | Default | Rationale |
+|-------|---------|-----------|
+| max-files-per-root | 50,000 | Prevent full image traversal |
+| max-symlink-depth | 32 | Avoid infinite loops in malformed structures |
+| prefix-pruning | enabled | Skip paths outside expected locations |
+| timeout-per-root | 60s | Bound analysis time per project |
+
+## CLI Contract
+
+### `stellaops-cli bun inspect`
+
+Display Bun package inventory for local root or scan ID:
+
+```bash
+# Local analysis
+stellaops-cli bun inspect /path/to/project
+
+# Remote scan lookup
+stellaops-cli bun inspect --scan-id abc123
+```
+
+Output formats: `--output json|table|ndjson`
+
+### `stellaops-cli bun resolve`
+
+Resolve Bun packages by scan ID, digest, or image reference:
+
+```bash
+stellaops-cli bun resolve --scan-id abc123 --package lodash
+stellaops-cli bun resolve --digest sha256:abc... --format json
+```
+
+## WebService Contract
+
+### `GET /api/scans/{scanId}/bun-packages`
+
+Returns inventory of Bun packages for a completed scan.
+
+Query parameters:
+- `page`, `pageSize`: Pagination
+- `name`: Filter by package name (prefix match)
+- `scope`: Filter by npm scope
+
+Response schema:
+```json
+{
+ "scanId": "abc123",
+ "analyzer": "bun",
+ "packages": [
+ {
+ "name": "lodash",
+ "version": "4.17.21",
+ "purl": "pkg:npm/lodash@4.17.21?package_manager=bun",
+ "evidence": [
+ {
+ "kind": "File",
+ "source": "node_modules",
+ "locator": "node_modules/lodash/package.json",
+ "sha256": "abc..."
+ }
+ ]
+ }
+ ],
+ "total": 150,
+ "page": 1,
+ "pageSize": 50
+}
+```
+
+## Unsupported Artifact Handling
+
+### Binary Lockfile (`bun.lockb`)
+
+When only `bun.lockb` is present (no `bun.lock` or `node_modules`):
+
+1. Emit remediation finding with severity `Info`
+2. Provide migration command: `bun install --save-text-lockfile`
+3. Skip package enumeration rather than emit a silently incomplete inventory from a partial binary parse
+
+```csharp
+record BunLockbUnsupportedFinding(
+ string Path,
+ string RemediationCommand = "bun install --save-text-lockfile",
+ string Reason = "Binary lockfile format is undocumented and unstable");
+```
+
+## Test Fixtures
+
+| Fixture | Purpose | Validation |
+|---------|---------|------------|
+| `hoisted-install` | Standard Bun install with `node_modules` + `bun.lock` | Installed inventory path |
+| `isolated-linker` | `bun install --linker isolated` structure | `.bun/` traversal |
+| `lockfile-only` | No `node_modules`, only `bun.lock` | Lockfile inventory, dev/prod filtering |
+| `binary-lockfile-only` | Only `bun.lockb` present | Unsupported remediation message |
+| `monorepo-workspaces` | Multiple `package.json` under single lock | Workspace member handling |
+| `symlink-cycles` | Malformed structure with cycles | Cycle detection, no infinite loops |
+
+## Configuration
+
+### Environment Variables
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `STELLAOPS_BUN_MAX_FILES` | 50000 | Max files per root |
+| `STELLAOPS_BUN_MAX_SYMLINK_DEPTH` | 32 | Max symlink traversal depth |
+| `STELLAOPS_BUN_INCLUDE_DEV` | true | Include dev dependencies |
+| `STELLAOPS_BUN_TIMEOUT_SECONDS` | 60 | Per-root analysis timeout |
+
+### appsettings.json
+
+```json
+{
+ "Scanner": {
+ "Analyzers": {
+ "Bun": {
+ "MaxFilesPerRoot": 50000,
+ "MaxSymlinkDepth": 32,
+ "IncludeDevDependencies": true,
+ "TimeoutSeconds": 60
+ }
+ }
+ }
+}
+```
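+
+A minimal binding sketch, assuming a hypothetical `BunAnalyzerOptions` type whose property names mirror the keys above:
+
+```csharp
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+
+public sealed class BunAnalyzerOptions
+{
+    public int MaxFilesPerRoot { get; set; } = 50_000;
+    public int MaxSymlinkDepth { get; set; } = 32;
+    public bool IncludeDevDependencies { get; set; } = true;
+    public int TimeoutSeconds { get; set; } = 60;
+}
+
+// Wiring (sketch): services.Configure<BunAnalyzerOptions>(
+//     configuration.GetSection("Scanner:Analyzers:Bun"));
+```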
+
+## Determinism Requirements
+
+1. **Sorted Output**: Packages ordered by `(name, version)` tuple
+2. **Stable IDs**: Component keys computed as `sha256(analyzerId + purl)`
+3. **Reproducible Evidence**: Evidence ordered by `(kind, source, locator)`
+4. **No Timestamps**: Evidence does not include file modification times
+5. **Canonical Paths**: All paths normalized (forward slashes, no trailing slash)
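+
+Requirements 1–2 reduce to a stable sort plus a content hash. A minimal sketch, assuming `BunPackage` exposes `Name`, `Version`, and `Purl` properties:
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Security.Cryptography;
+using System.Text;
+
+// Stable component key: hex SHA-256 over analyzer id + purl.
+static string ComponentKey(string analyzerId, string purl)
+{
+    byte[] hash = SHA256.HashData(Encoding.UTF8.GetBytes(analyzerId + purl));
+    return Convert.ToHexString(hash).ToLowerInvariant();
+}
+
+// Deterministic ordering by (name, version) with ordinal comparison.
+static IEnumerable<BunPackage> Ordered(IEnumerable<BunPackage> packages) =>
+    packages.OrderBy(p => p.Name, StringComparer.Ordinal)
+            .ThenBy(p => p.Version, StringComparer.Ordinal);
+```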
+
+## Open Decisions
+
+1. **Dev Dependency Default**: Currently `include_dev: true` for lockfile-only scans — confirm with Policy Guild
+2. **Workspace Handling**: Whether to emit separate inventory per workspace or merged — await monorepo fixture results
+3. **PURL Qualifier**: Using `?package_manager=bun` vs no qualifier — coordinate with Concelier linkset resolution
+
+## Handoff
+
+This document serves as the PREP artifact for PREP-SCANNER-BUN-001-DESIGN-DOC. Update upon:
+- Policy Guild confirmation of dev dependency defaults
+- Concelier Guild decision on PURL qualifier handling
+- Fixture suite completion revealing edge cases
diff --git a/docs/schemas/export-profiles.schema.json b/docs/schemas/export-profiles.schema.json
new file mode 100644
index 000000000..0eba46ef2
--- /dev/null
+++ b/docs/schemas/export-profiles.schema.json
@@ -0,0 +1,502 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://stella-ops.org/schemas/export-profiles.schema.json",
+ "title": "StellaOps Export Profiles Schema",
+ "description": "Schema for CLI export profiles, scheduling, and distribution configuration. Unblocks CLI-EXPORT-35-001.",
+ "type": "object",
+ "definitions": {
+ "ExportProfile": {
+ "type": "object",
+ "required": ["profile_id", "name", "format", "created_at"],
+ "properties": {
+ "profile_id": {
+ "type": "string",
+ "format": "uuid",
+ "description": "Unique identifier for the export profile"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 128,
+ "description": "Human-readable profile name"
+ },
+ "description": {
+ "type": "string",
+ "maxLength": 512
+ },
+ "format": {
+ "$ref": "#/definitions/ExportFormat"
+ },
+ "filters": {
+ "$ref": "#/definitions/ExportFilters"
+ },
+ "schedule": {
+ "$ref": "#/definitions/ExportSchedule"
+ },
+ "distribution": {
+ "$ref": "#/definitions/Distribution"
+ },
+ "retention": {
+ "$ref": "#/definitions/RetentionPolicy"
+ },
+ "signing": {
+ "$ref": "#/definitions/SigningConfig"
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": true
+ },
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "tenant_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "updated_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "created_by": {
+ "type": "string"
+ }
+ }
+ },
+ "ExportFormat": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["sbom", "vex", "attestation", "evidence", "risk-report", "compliance-report", "airgap-bundle"]
+ },
+ "variant": {
+ "type": "string",
+ "enum": ["cyclonedx-1.6", "spdx-3.0.1", "openvex", "csaf-vex", "in-toto", "dsse", "json", "csv", "pdf"],
+ "description": "Format variant for the export type"
+ },
+ "options": {
+ "type": "object",
+ "properties": {
+ "include_signatures": {
+ "type": "boolean",
+ "default": true
+ },
+ "include_provenance": {
+ "type": "boolean",
+ "default": false
+ },
+ "include_rekor_receipts": {
+ "type": "boolean",
+ "default": false
+ },
+ "compress": {
+ "type": "boolean",
+ "default": true
+ },
+ "compression_algorithm": {
+ "type": "string",
+ "enum": ["gzip", "zstd", "none"],
+ "default": "gzip"
+ }
+ }
+ }
+ }
+ },
+ "ExportFilters": {
+ "type": "object",
+ "description": "Filters to apply when selecting data for export",
+ "properties": {
+ "date_range": {
+ "type": "object",
+ "properties": {
+ "from": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "to": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "relative": {
+ "type": "string",
+ "pattern": "^-?[0-9]+[hdwmy]$",
+ "description": "Relative time range (e.g., -7d for last 7 days)"
+ }
+ }
+ },
+ "severity": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["critical", "high", "medium", "low", "info", "unknown"]
+ }
+ },
+ "vex_status": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["affected", "not_affected", "fixed", "under_investigation"]
+ }
+ },
+ "components": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "PURL patterns to include"
+ },
+ "exclude_components": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "PURL patterns to exclude"
+ },
+ "cve_ids": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "pattern": "^CVE-[0-9]{4}-[0-9]+$"
+ }
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "environments": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ }
+ },
+ "ExportSchedule": {
+ "type": "object",
+ "description": "Schedule for automated exports",
+ "properties": {
+ "enabled": {
+ "type": "boolean",
+ "default": false
+ },
+ "cron": {
+ "type": "string",
+ "pattern": "^(@(annually|yearly|monthly|weekly|daily|hourly))|((\\*|[0-9,\\-\\/]+)\\s+){4,5}(\\*|[0-9,\\-\\/]+)$",
+ "description": "Cron expression for scheduling (5 or 6 fields)"
+ },
+ "timezone": {
+ "type": "string",
+ "default": "UTC",
+ "description": "IANA timezone identifier"
+ },
+ "next_run": {
+ "type": "string",
+ "format": "date-time",
+ "readOnly": true
+ },
+ "last_run": {
+ "type": "string",
+ "format": "date-time",
+ "readOnly": true
+ },
+ "last_status": {
+ "type": "string",
+ "enum": ["success", "partial", "failed", "pending"],
+ "readOnly": true
+ }
+ }
+ },
+ "Distribution": {
+ "type": "object",
+ "description": "Distribution targets for exports",
+ "properties": {
+ "targets": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/DistributionTarget"
+ }
+ },
+ "notify_on_completion": {
+ "type": "boolean",
+ "default": true
+ },
+ "notify_on_failure": {
+ "type": "boolean",
+ "default": true
+ }
+ }
+ },
+ "DistributionTarget": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["s3", "azure-blob", "gcs", "sftp", "webhook", "email", "local"]
+ },
+ "name": {
+ "type": "string"
+ },
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "config": {
+ "type": "object",
+ "description": "Target-specific configuration",
+ "additionalProperties": true
+ }
+ },
+ "allOf": [
+ {
+ "if": {
+ "properties": { "type": { "const": "s3" } }
+ },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["bucket", "region"],
+ "properties": {
+ "bucket": { "type": "string" },
+ "region": { "type": "string" },
+ "prefix": { "type": "string" },
+ "credentials_secret": { "type": "string" }
+ }
+ }
+ }
+ }
+ },
+ {
+ "if": {
+ "properties": { "type": { "const": "webhook" } }
+ },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["url"],
+ "properties": {
+ "url": { "type": "string", "format": "uri" },
+ "method": { "type": "string", "enum": ["POST", "PUT"], "default": "POST" },
+ "headers": { "type": "object", "additionalProperties": { "type": "string" } },
+ "auth_secret": { "type": "string" }
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "RetentionPolicy": {
+ "type": "object",
+ "description": "Retention policy for exported artifacts",
+ "properties": {
+ "max_age_days": {
+ "type": "integer",
+ "minimum": 1,
+ "maximum": 3650,
+ "default": 365
+ },
+ "max_count": {
+ "type": "integer",
+ "minimum": 1,
+ "description": "Maximum number of exports to retain"
+ },
+ "delete_on_success": {
+ "type": "boolean",
+ "default": false,
+ "description": "Delete source data after successful export"
+ }
+ }
+ },
+ "SigningConfig": {
+ "type": "object",
+ "description": "Signing configuration for exports",
+ "properties": {
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "key_id": {
+ "type": "string",
+ "description": "Key identifier for signing"
+ },
+ "algorithm": {
+ "type": "string",
+ "enum": ["ES256", "RS256", "EdDSA"],
+ "default": "ES256"
+ },
+ "include_rekor": {
+ "type": "boolean",
+ "default": false,
+ "description": "Include Rekor transparency log receipt"
+ },
+ "timestamp_authority": {
+ "type": "string",
+ "format": "uri",
+ "description": "RFC 3161 timestamp authority URL"
+ }
+ }
+ },
+ "ExportJob": {
+ "type": "object",
+ "description": "Export job status",
+ "required": ["job_id", "profile_id", "status", "created_at"],
+ "properties": {
+ "job_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "profile_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "status": {
+ "type": "string",
+ "enum": ["pending", "running", "success", "partial", "failed", "cancelled"]
+ },
+ "progress": {
+ "type": "object",
+ "properties": {
+ "percent": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 100
+ },
+ "items_processed": {
+ "type": "integer"
+ },
+ "items_total": {
+ "type": "integer"
+ }
+ }
+ },
+ "artifacts": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ExportArtifact"
+ }
+ },
+ "errors": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "started_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "completed_at": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ },
+ "ExportArtifact": {
+ "type": "object",
+ "required": ["artifact_id", "digest", "size"],
+ "properties": {
+ "artifact_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "filename": {
+ "type": "string"
+ },
+ "digest": {
+ "type": "string",
+ "pattern": "^sha256:[a-f0-9]{64}$"
+ },
+ "size": {
+ "type": "integer",
+ "description": "Size in bytes"
+ },
+ "format": {
+ "type": "string"
+ },
+ "signature": {
+ "type": "string",
+ "description": "Base64-encoded signature"
+ },
+ "download_url": {
+ "type": "string",
+ "format": "uri"
+ },
+ "expires_at": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ }
+ },
+ "properties": {
+ "profiles": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ExportProfile"
+ }
+ }
+ },
+ "examples": [
+ {
+ "profiles": [
+ {
+ "profile_id": "550e8400-e29b-41d4-a716-446655440001",
+ "name": "Weekly SBOM Export",
+ "description": "Export all SBOMs in CycloneDX format weekly",
+ "format": {
+ "type": "sbom",
+ "variant": "cyclonedx-1.6",
+ "options": {
+ "include_signatures": true,
+ "compress": true
+ }
+ },
+ "filters": {
+ "date_range": {
+ "relative": "-7d"
+ }
+ },
+ "schedule": {
+ "enabled": true,
+ "cron": "0 2 * * 0",
+ "timezone": "UTC"
+ },
+ "distribution": {
+ "targets": [
+ {
+ "type": "s3",
+ "name": "compliance-bucket",
+ "config": {
+ "bucket": "company-compliance-exports",
+ "region": "us-east-1",
+ "prefix": "sboms/"
+ }
+ }
+ ]
+ },
+ "retention": {
+ "max_age_days": 365,
+ "max_count": 52
+ },
+ "enabled": true,
+ "created_at": "2025-12-01T00:00:00Z"
+ }
+ ]
+ }
+ ]
+}
diff --git a/docs/schemas/notify-rules.schema.json b/docs/schemas/notify-rules.schema.json
new file mode 100644
index 000000000..6e80b775c
--- /dev/null
+++ b/docs/schemas/notify-rules.schema.json
@@ -0,0 +1,605 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://stella-ops.org/schemas/notify-rules.schema.json",
+ "title": "StellaOps Notification Rules Schema",
+ "description": "Schema for notification rules, webhook payloads, and digest formats. Unblocks CLI-NOTIFY-38-001.",
+ "type": "object",
+ "definitions": {
+ "NotifyRule": {
+ "type": "object",
+ "required": ["rule_id", "name", "event_types", "channels", "created_at"],
+ "properties": {
+ "rule_id": {
+ "type": "string",
+ "format": "uuid",
+ "description": "Unique identifier for the notification rule"
+ },
+ "name": {
+ "type": "string",
+ "minLength": 1,
+ "maxLength": 128,
+ "description": "Human-readable rule name"
+ },
+ "description": {
+ "type": "string",
+ "maxLength": 512
+ },
+ "event_types": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "$ref": "#/definitions/EventType"
+ },
+ "description": "Event types that trigger this rule"
+ },
+ "filters": {
+ "$ref": "#/definitions/NotifyFilters"
+ },
+ "channels": {
+ "type": "array",
+ "minItems": 1,
+ "items": {
+ "$ref": "#/definitions/NotifyChannel"
+ }
+ },
+ "throttle": {
+ "$ref": "#/definitions/ThrottleConfig"
+ },
+ "digest": {
+ "$ref": "#/definitions/DigestConfig"
+ },
+ "templates": {
+ "$ref": "#/definitions/NotifyTemplates"
+ },
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "priority": {
+ "type": "integer",
+ "minimum": 0,
+ "maximum": 100,
+ "default": 50,
+ "description": "Rule priority (higher = processed first)"
+ },
+ "tenant_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "created_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "updated_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "created_by": {
+ "type": "string"
+ }
+ }
+ },
+ "EventType": {
+ "type": "string",
+ "enum": [
+ "vulnerability.new",
+ "vulnerability.updated",
+ "vulnerability.resolved",
+ "vulnerability.critical",
+ "vex.status_changed",
+ "vex.consensus_changed",
+ "policy.violation",
+ "policy.override_requested",
+ "policy.override_approved",
+ "policy.override_expired",
+ "scan.completed",
+ "scan.failed",
+ "attestation.created",
+ "attestation.verification_failed",
+ "airgap.staleness_warning",
+ "airgap.staleness_critical",
+ "airgap.bundle_imported",
+ "export.completed",
+ "export.failed",
+ "system.health_degraded",
+ "system.error"
+ ]
+ },
+ "NotifyFilters": {
+ "type": "object",
+ "description": "Filters to apply before triggering notification",
+ "properties": {
+ "severity": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["critical", "high", "medium", "low", "info"]
+ },
+ "description": "Only trigger for these severities"
+ },
+ "cvss_minimum": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 10,
+ "description": "Minimum CVSS score to trigger"
+ },
+ "components": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "PURL patterns to match"
+ },
+ "environments": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "tags": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "kev_only": {
+ "type": "boolean",
+ "default": false,
+ "description": "Only trigger for Known Exploited Vulnerabilities"
+ },
+ "fix_available": {
+ "type": "boolean",
+ "description": "Filter by fix availability"
+ }
+ }
+ },
+ "NotifyChannel": {
+ "type": "object",
+ "required": ["type"],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["email", "slack", "teams", "webhook", "pagerduty", "opsgenie", "sns"]
+ },
+ "name": {
+ "type": "string"
+ },
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "config": {
+ "type": "object",
+ "additionalProperties": true
+ }
+ },
+ "allOf": [
+ {
+ "if": { "properties": { "type": { "const": "email" } } },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["recipients"],
+ "properties": {
+ "recipients": {
+ "type": "array",
+ "items": { "type": "string", "format": "email" }
+ },
+ "cc": {
+ "type": "array",
+ "items": { "type": "string", "format": "email" }
+ },
+ "subject_prefix": { "type": "string" }
+ }
+ }
+ }
+ }
+ },
+ {
+ "if": { "properties": { "type": { "const": "slack" } } },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["webhook_url"],
+ "properties": {
+ "webhook_url": { "type": "string", "format": "uri" },
+ "channel": { "type": "string" },
+ "username": { "type": "string" },
+ "icon_emoji": { "type": "string" }
+ }
+ }
+ }
+ }
+ },
+ {
+ "if": { "properties": { "type": { "const": "teams" } } },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["webhook_url"],
+ "properties": {
+ "webhook_url": { "type": "string", "format": "uri" }
+ }
+ }
+ }
+ }
+ },
+ {
+ "if": { "properties": { "type": { "const": "webhook" } } },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["url"],
+ "properties": {
+ "url": { "type": "string", "format": "uri" },
+ "method": { "type": "string", "enum": ["POST", "PUT"], "default": "POST" },
+ "headers": { "type": "object", "additionalProperties": { "type": "string" } },
+ "auth_type": { "type": "string", "enum": ["none", "basic", "bearer", "hmac"] },
+ "auth_secret": { "type": "string" },
+ "retry_count": { "type": "integer", "minimum": 0, "maximum": 5, "default": 3 },
+ "timeout_seconds": { "type": "integer", "minimum": 1, "maximum": 60, "default": 30 }
+ }
+ }
+ }
+ }
+ },
+ {
+ "if": { "properties": { "type": { "const": "pagerduty" } } },
+ "then": {
+ "properties": {
+ "config": {
+ "type": "object",
+ "required": ["routing_key"],
+ "properties": {
+ "routing_key": { "type": "string" },
+ "severity_mapping": {
+ "type": "object",
+ "additionalProperties": { "type": "string", "enum": ["critical", "error", "warning", "info"] }
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "ThrottleConfig": {
+ "type": "object",
+ "description": "Throttling configuration to prevent notification storms",
+ "properties": {
+ "enabled": {
+ "type": "boolean",
+ "default": true
+ },
+ "max_per_hour": {
+ "type": "integer",
+ "minimum": 1,
+ "default": 100
+ },
+ "max_per_day": {
+ "type": "integer",
+ "minimum": 1,
+ "default": 1000
+ },
+ "dedupe_window_seconds": {
+ "type": "integer",
+ "minimum": 0,
+ "default": 300,
+ "description": "Window for deduplicating identical notifications"
+ },
+ "dedupe_key_fields": {
+ "type": "array",
+ "items": { "type": "string" },
+ "default": ["event_type", "cve_id", "purl"],
+ "description": "Fields to use for deduplication key"
+ }
+ }
+ },
+ "DigestConfig": {
+ "type": "object",
+ "description": "Configuration for digest/summary notifications",
+ "properties": {
+ "enabled": {
+ "type": "boolean",
+ "default": false
+ },
+ "frequency": {
+ "type": "string",
+ "enum": ["hourly", "daily", "weekly"],
+ "default": "daily"
+ },
+ "schedule": {
+ "type": "string",
+ "description": "Cron expression for digest delivery"
+ },
+ "timezone": {
+ "type": "string",
+ "default": "UTC"
+ },
+ "min_events": {
+ "type": "integer",
+ "minimum": 1,
+ "default": 1,
+ "description": "Minimum events required to send digest"
+ },
+ "group_by": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "enum": ["severity", "event_type", "component", "environment"]
+ },
+ "description": "Fields to group events by in digest"
+ },
+ "include_summary": {
+ "type": "boolean",
+ "default": true
+ },
+ "include_details": {
+ "type": "boolean",
+ "default": false,
+ "description": "Include full event details in digest"
+ }
+ }
+ },
+ "NotifyTemplates": {
+ "type": "object",
+ "description": "Custom notification templates",
+ "properties": {
+ "subject": {
+ "type": "string",
+ "description": "Template for notification subject (supports {{variables}})"
+ },
+ "body": {
+ "type": "string",
+ "description": "Template for notification body"
+ },
+ "body_html": {
+ "type": "string",
+ "description": "HTML template for email body"
+ }
+ }
+ },
+ "WebhookPayload": {
+ "type": "object",
+ "description": "Standard webhook payload format",
+ "required": ["id", "timestamp", "event_type", "data"],
+ "properties": {
+ "id": {
+ "type": "string",
+ "format": "uuid",
+ "description": "Unique notification ID"
+ },
+ "timestamp": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "event_type": {
+ "$ref": "#/definitions/EventType"
+ },
+ "version": {
+ "type": "string",
+ "default": "1.0.0"
+ },
+ "tenant_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "data": {
+ "type": "object",
+ "description": "Event-specific payload data",
+ "additionalProperties": true
+ },
+ "metadata": {
+ "type": "object",
+ "properties": {
+ "rule_id": { "type": "string", "format": "uuid" },
+ "rule_name": { "type": "string" },
+ "retry_count": { "type": "integer" },
+ "digest_id": { "type": "string", "format": "uuid" }
+ }
+ }
+ }
+ },
+ "DigestPayload": {
+ "type": "object",
+ "description": "Digest/summary notification payload",
+ "required": ["id", "timestamp", "period", "summary"],
+ "properties": {
+ "id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "timestamp": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "period": {
+ "type": "object",
+ "required": ["start", "end"],
+ "properties": {
+ "start": { "type": "string", "format": "date-time" },
+ "end": { "type": "string", "format": "date-time" }
+ }
+ },
+ "summary": {
+ "type": "object",
+ "properties": {
+ "total_events": { "type": "integer" },
+ "by_severity": {
+ "type": "object",
+ "additionalProperties": { "type": "integer" }
+ },
+ "by_event_type": {
+ "type": "object",
+ "additionalProperties": { "type": "integer" }
+ },
+ "new_vulnerabilities": { "type": "integer" },
+ "resolved_vulnerabilities": { "type": "integer" },
+ "policy_violations": { "type": "integer" }
+ }
+ },
+ "events": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/WebhookPayload"
+ },
+ "description": "Optional detailed event list"
+ },
+ "groups": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "key": { "type": "string" },
+ "count": { "type": "integer" },
+ "sample_events": {
+ "type": "array",
+ "items": { "$ref": "#/definitions/WebhookPayload" }
+ }
+ }
+ }
+ }
+ }
+ },
+ "NotifySimulationRequest": {
+ "type": "object",
+ "description": "Request to simulate a notification rule",
+ "required": ["event"],
+ "properties": {
+ "rule_id": {
+ "type": "string",
+ "format": "uuid",
+ "description": "Rule to simulate (optional, uses all matching if not specified)"
+ },
+ "event": {
+ "$ref": "#/definitions/WebhookPayload"
+ },
+ "dry_run": {
+ "type": "boolean",
+ "default": true,
+ "description": "If true, don't actually send notifications"
+ }
+ }
+ },
+ "NotifySimulationResult": {
+ "type": "object",
+ "required": ["matched_rules", "would_notify"],
+ "properties": {
+ "matched_rules": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "rule_id": { "type": "string", "format": "uuid" },
+ "rule_name": { "type": "string" },
+ "matched": { "type": "boolean" },
+ "reason": { "type": "string" }
+ }
+ }
+ },
+ "would_notify": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "channel_type": { "type": "string" },
+ "channel_name": { "type": "string" },
+ "payload_preview": { "type": "object" }
+ }
+ }
+ },
+ "throttled": {
+ "type": "boolean"
+ },
+ "throttle_reason": {
+ "type": "string"
+ }
+ }
+ },
+ "NotifyAckToken": {
+ "type": "object",
+ "description": "Acknowledgement token for notifications",
+ "required": ["token", "notification_id", "expires_at"],
+ "properties": {
+ "token": {
+ "type": "string",
+ "description": "Opaque acknowledgement token"
+ },
+ "notification_id": {
+ "type": "string",
+ "format": "uuid"
+ },
+ "event_type": {
+ "$ref": "#/definitions/EventType"
+ },
+ "expires_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "ack_url": {
+ "type": "string",
+ "format": "uri",
+ "description": "URL to acknowledge the notification"
+ }
+ }
+ }
+ },
+ "properties": {
+ "rules": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/NotifyRule"
+ }
+ }
+ },
+ "examples": [
+ {
+ "rules": [
+ {
+ "rule_id": "550e8400-e29b-41d4-a716-446655440002",
+ "name": "Critical Vulnerability Alert",
+ "description": "Immediate notification for critical vulnerabilities",
+ "event_types": ["vulnerability.critical", "vulnerability.new"],
+ "filters": {
+ "severity": ["critical"],
+ "kev_only": false
+ },
+ "channels": [
+ {
+ "type": "slack",
+ "name": "security-alerts",
+ "config": {
+ "webhook_url": "https://hooks.slack.com/services/xxx",
+ "channel": "#security-alerts",
+ "icon_emoji": ":warning:"
+ }
+ },
+ {
+ "type": "pagerduty",
+ "name": "security-oncall",
+ "config": {
+ "routing_key": "xxx",
+ "severity_mapping": {
+ "critical": "critical",
+ "high": "error"
+ }
+ }
+ }
+ ],
+ "throttle": {
+ "enabled": true,
+ "max_per_hour": 50,
+ "dedupe_window_seconds": 300
+ },
+ "enabled": true,
+ "priority": 100,
+ "created_at": "2025-12-01T00:00:00Z"
+ }
+ ]
+ }
+ ]
+}
diff --git a/docs/schemas/policy-registry-api.openapi.yaml b/docs/schemas/policy-registry-api.openapi.yaml
new file mode 100644
index 000000000..92d2bf254
--- /dev/null
+++ b/docs/schemas/policy-registry-api.openapi.yaml
@@ -0,0 +1,1510 @@
+openapi: 3.1.0
+info:
+ title: StellaOps Policy Registry API
+ version: 1.0.0
+ description: |
+ Policy Registry API for managing verification policies, policy packs, snapshots,
+ violations, overrides, and air-gap operations.
+
+ This specification unblocks: REGISTRY-API-27-001 through 27-010
+ contact:
+ name: Policy Registry Guild
+ email: policy-guild@stella-ops.org
+ license:
+ name: AGPL-3.0-or-later
+ url: https://www.gnu.org/licenses/agpl-3.0.html
+
+servers:
+ - url: /api/v1/policy
+ description: Policy Engine API
+
+tags:
+ - name: verification-policy
+ description: Verification policy CRUD operations
+ - name: policy-pack
+ description: Policy pack workspace, compile, simulation
+ - name: snapshot
+ description: Policy snapshot management
+ - name: violation
+ description: Policy violation tracking
+ - name: override
+ description: Policy override management
+ - name: sealed-mode
+ description: Air-gap sealed mode operations
+ - name: staleness
+ description: Advisory staleness tracking
+
+paths:
+ # ============================================================
+ # VERIFICATION POLICY ENDPOINTS
+ # ============================================================
+ /verification-policies:
+ get:
+ operationId: listVerificationPolicies
+ tags: [verification-policy]
+ summary: List all verification policies
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ - $ref: '#/components/parameters/PageSize'
+ - $ref: '#/components/parameters/PageToken'
+ responses:
+ '200':
+ description: List of verification policies
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VerificationPolicyList'
+ '401':
+ $ref: '#/components/responses/Unauthorized'
+ post:
+ operationId: createVerificationPolicy
+ tags: [verification-policy]
+ summary: Create a new verification policy
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateVerificationPolicyRequest'
+ responses:
+ '201':
+ description: Policy created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VerificationPolicy'
+ '400':
+ $ref: '#/components/responses/BadRequest'
+ '409':
+ $ref: '#/components/responses/Conflict'
+
+ /verification-policies/{policyId}:
+ parameters:
+ - $ref: '#/components/parameters/PolicyId'
+ - $ref: '#/components/parameters/TenantHeader'
+ get:
+ operationId: getVerificationPolicy
+ tags: [verification-policy]
+ summary: Get a verification policy by ID
+ responses:
+ '200':
+ description: Verification policy
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VerificationPolicy'
+ '404':
+ $ref: '#/components/responses/NotFound'
+ put:
+ operationId: updateVerificationPolicy
+ tags: [verification-policy]
+ summary: Update a verification policy
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UpdateVerificationPolicyRequest'
+ responses:
+ '200':
+ description: Policy updated
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VerificationPolicy'
+ '404':
+ $ref: '#/components/responses/NotFound'
+ delete:
+ operationId: deleteVerificationPolicy
+ tags: [verification-policy]
+ summary: Delete a verification policy
+ responses:
+ '204':
+ description: Policy deleted
+ '404':
+ $ref: '#/components/responses/NotFound'
+
+ # ============================================================
+ # POLICY PACK WORKSPACE ENDPOINTS
+ # ============================================================
+ /packs:
+ get:
+ operationId: listPolicyPacks
+ tags: [policy-pack]
+ summary: List policy packs in workspace
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ - $ref: '#/components/parameters/PageSize'
+ - $ref: '#/components/parameters/PageToken'
+ - name: status
+ in: query
+ schema:
+ type: string
+ enum: [draft, published, archived]
+ responses:
+ '200':
+ description: List of policy packs
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPackList'
+ post:
+ operationId: createPolicyPack
+ tags: [policy-pack]
+ summary: Create a new policy pack
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreatePolicyPackRequest'
+ responses:
+ '201':
+ description: Policy pack created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPack'
+
+ /packs/{packId}:
+ parameters:
+ - $ref: '#/components/parameters/PackId'
+ - $ref: '#/components/parameters/TenantHeader'
+ get:
+ operationId: getPolicyPack
+ tags: [policy-pack]
+ summary: Get a policy pack by ID
+ responses:
+ '200':
+ description: Policy pack
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPack'
+ '404':
+ $ref: '#/components/responses/NotFound'
+ put:
+ operationId: updatePolicyPack
+ tags: [policy-pack]
+ summary: Update a policy pack
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UpdatePolicyPackRequest'
+ responses:
+ '200':
+ description: Policy pack updated
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPack'
+ delete:
+ operationId: deletePolicyPack
+ tags: [policy-pack]
+ summary: Delete a policy pack (draft only)
+ responses:
+ '204':
+ description: Policy pack deleted
+ '409':
+ description: Cannot delete published pack
+
+ /packs/{packId}/compile:
+ post:
+ operationId: compilePolicyPack
+ tags: [policy-pack]
+ summary: Compile a policy pack
+ parameters:
+ - $ref: '#/components/parameters/PackId'
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Compilation result
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CompilationResult'
+ '422':
+ description: Compilation errors
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CompilationResult'
+
+ /packs/{packId}/simulate:
+ post:
+ operationId: simulatePolicyPack
+ tags: [policy-pack]
+ summary: Simulate a policy pack against sample data
+ parameters:
+ - $ref: '#/components/parameters/PackId'
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SimulationRequest'
+ responses:
+ '200':
+ description: Simulation result
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SimulationResult'
+
+ /packs/{packId}/publish:
+ post:
+ operationId: publishPolicyPack
+ tags: [policy-pack]
+ summary: Publish a policy pack (requires review approval)
+ parameters:
+ - $ref: '#/components/parameters/PackId'
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PublishRequest'
+ responses:
+ '200':
+ description: Pack published
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPack'
+ '409':
+ description: Pack not in reviewable state
+
+ /packs/{packId}/promote:
+ post:
+ operationId: promotePolicyPack
+ tags: [policy-pack]
+ summary: Promote a policy pack to production
+ parameters:
+ - $ref: '#/components/parameters/PackId'
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PromoteRequest'
+ responses:
+ '200':
+ description: Pack promoted
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/PolicyPack'
+
+ # ============================================================
+ # SNAPSHOT ENDPOINTS
+ # ============================================================
+ /snapshots:
+ get:
+ operationId: listSnapshots
+ tags: [snapshot]
+ summary: List policy snapshots
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ - $ref: '#/components/parameters/PageSize'
+ - $ref: '#/components/parameters/PageToken'
+ responses:
+ '200':
+ description: List of snapshots
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SnapshotList'
+ post:
+ operationId: createSnapshot
+ tags: [snapshot]
+ summary: Create a policy snapshot
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateSnapshotRequest'
+ responses:
+ '201':
+ description: Snapshot created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Snapshot'
+
+ /snapshots/{snapshotId}:
+ parameters:
+ - $ref: '#/components/parameters/SnapshotId'
+ - $ref: '#/components/parameters/TenantHeader'
+ get:
+ operationId: getSnapshot
+ tags: [snapshot]
+ summary: Get a snapshot by ID
+ responses:
+ '200':
+ description: Snapshot
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Snapshot'
+ '404':
+ $ref: '#/components/responses/NotFound'
+ delete:
+ operationId: deleteSnapshot
+ tags: [snapshot]
+ summary: Delete a snapshot
+ responses:
+ '204':
+ description: Snapshot deleted
+
+ /snapshots/by-digest/{digest}:
+ get:
+ operationId: getSnapshotByDigest
+ tags: [snapshot]
+ summary: Get a snapshot by content digest
+ parameters:
+ - name: digest
+ in: path
+ required: true
+ schema:
+ type: string
+ pattern: '^sha256:[a-f0-9]{64}$'
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Snapshot
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Snapshot'
+ '404':
+ $ref: '#/components/responses/NotFound'
+
+ # ============================================================
+ # VIOLATION ENDPOINTS
+ # ============================================================
+ /violations:
+ get:
+ operationId: listViolations
+ tags: [violation]
+ summary: List policy violations
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ - $ref: '#/components/parameters/PageSize'
+ - $ref: '#/components/parameters/PageToken'
+ - name: severity
+ in: query
+ schema:
+ type: string
+ enum: [critical, high, medium, low, info]
+ responses:
+ '200':
+ description: List of violations
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ViolationList'
+ post:
+ operationId: appendViolation
+ tags: [violation]
+ summary: Append a new violation
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateViolationRequest'
+ responses:
+ '201':
+ description: Violation created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Violation'
+
+ /violations/batch:
+ post:
+ operationId: appendViolationBatch
+ tags: [violation]
+ summary: Append violations in batch
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ViolationBatchRequest'
+ responses:
+ '201':
+ description: Violations created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ViolationBatchResult'
+
+ /violations/{violationId}:
+ get:
+ operationId: getViolation
+ tags: [violation]
+ summary: Get a violation by ID
+ parameters:
+ - $ref: '#/components/parameters/ViolationId'
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Violation
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Violation'
+ '404':
+ $ref: '#/components/responses/NotFound'
+
+ # ============================================================
+ # OVERRIDE ENDPOINTS
+ # ============================================================
+ /overrides:
+ post:
+ operationId: createOverride
+ tags: [override]
+ summary: Create a policy override
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateOverrideRequest'
+ responses:
+ '201':
+ description: Override created
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Override'
+
+ /overrides/{overrideId}:
+ parameters:
+ - $ref: '#/components/parameters/OverrideId'
+ - $ref: '#/components/parameters/TenantHeader'
+ get:
+ operationId: getOverride
+ tags: [override]
+ summary: Get an override by ID
+ responses:
+ '200':
+ description: Override
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Override'
+ '404':
+ $ref: '#/components/responses/NotFound'
+ delete:
+ operationId: deleteOverride
+ tags: [override]
+ summary: Delete an override
+ responses:
+ '204':
+ description: Override deleted
+
+ /overrides/{overrideId}:approve:
+ post:
+ operationId: approveOverride
+ tags: [override]
+ summary: Approve an override
+ parameters:
+ - $ref: '#/components/parameters/OverrideId'
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ApproveOverrideRequest'
+ responses:
+ '200':
+ description: Override approved
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Override'
+
+ /overrides/{overrideId}:disable:
+ post:
+ operationId: disableOverride
+ tags: [override]
+ summary: Disable an override
+ parameters:
+ - $ref: '#/components/parameters/OverrideId'
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Override disabled
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Override'
+
+ # ============================================================
+ # SEALED MODE ENDPOINTS
+ # ============================================================
+ /sealed-mode/status:
+ get:
+ operationId: getSealedModeStatus
+ tags: [sealed-mode]
+ summary: Get sealed mode status
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Sealed mode status
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SealedModeStatus'
+
+ /sealed-mode/seal:
+ post:
+ operationId: seal
+ tags: [sealed-mode]
+ summary: Activate sealed mode (air-gap)
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SealRequest'
+ responses:
+ '200':
+ description: Environment sealed
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SealedModeStatus'
+
+ /sealed-mode/unseal:
+ post:
+ operationId: unseal
+ tags: [sealed-mode]
+ summary: Deactivate sealed mode
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/UnsealRequest'
+ responses:
+ '200':
+ description: Environment unsealed
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SealedModeStatus'
+
+ /sealed-mode/verify:
+ post:
+ operationId: verifyBundle
+ tags: [sealed-mode]
+ summary: Verify an air-gap bundle
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VerifyBundleRequest'
+ responses:
+ '200':
+ description: Bundle verification result
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/BundleVerificationResult'
+
+ # ============================================================
+ # STALENESS ENDPOINTS
+ # ============================================================
+ /staleness/status:
+ get:
+ operationId: getStalenessStatus
+ tags: [staleness]
+ summary: Get advisory staleness status
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ responses:
+ '200':
+ description: Staleness status
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/StalenessStatus'
+
+ /staleness/evaluate:
+ post:
+ operationId: evaluateStaleness
+ tags: [staleness]
+ summary: Evaluate staleness for a specific advisory source
+ parameters:
+ - $ref: '#/components/parameters/TenantHeader'
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/EvaluateStalenessRequest'
+ responses:
+ '200':
+ description: Evaluation result
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/StalenessEvaluation'
+
+components:
+ parameters:
+ TenantHeader:
+ name: X-Tenant-Id
+ in: header
+ required: true
+ schema:
+ type: string
+ format: uuid
+ description: Tenant identifier for multi-tenant isolation
+
+ PolicyId:
+ name: policyId
+ in: path
+ required: true
+ schema:
+ type: string
+ description: Verification policy identifier
+
+ PackId:
+ name: packId
+ in: path
+ required: true
+ schema:
+ type: string
+ format: uuid
+ description: Policy pack identifier
+
+ SnapshotId:
+ name: snapshotId
+ in: path
+ required: true
+ schema:
+ type: string
+ format: uuid
+ description: Snapshot identifier
+
+ ViolationId:
+ name: violationId
+ in: path
+ required: true
+ schema:
+ type: string
+ format: uuid
+ description: Violation identifier
+
+ OverrideId:
+ name: overrideId
+ in: path
+ required: true
+ schema:
+ type: string
+ format: uuid
+ description: Override identifier
+
+ PageSize:
+ name: page_size
+ in: query
+ schema:
+ type: integer
+ minimum: 1
+ maximum: 100
+ default: 20
+
+ PageToken:
+ name: page_token
+ in: query
+ schema:
+ type: string
+
+ responses:
+ BadRequest:
+ description: Bad request
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ Unauthorized:
+ description: Unauthorized
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ NotFound:
+ description: Resource not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ Conflict:
+ description: Conflict (resource already exists)
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+
+ schemas:
+ # ============================================================
+ # VERIFICATION POLICY SCHEMAS
+ # ============================================================
+ VerificationPolicy:
+ type: object
+ required: [policy_id, version, tenant_scope, predicate_types, signer_requirements, created_at, updated_at]
+ properties:
+ policy_id:
+ type: string
+ version:
+ type: string
+ description:
+ type: string
+ tenant_scope:
+ type: string
+ predicate_types:
+ type: array
+ items:
+ type: string
+ signer_requirements:
+ $ref: '#/components/schemas/SignerRequirements'
+ validity_window:
+ $ref: '#/components/schemas/ValidityWindow'
+ metadata:
+ type: object
+ additionalProperties: true
+ created_at:
+ type: string
+ format: date-time
+ updated_at:
+ type: string
+ format: date-time
+
+ SignerRequirements:
+ type: object
+ required: [minimum_signatures, trusted_key_fingerprints, require_rekor]
+ properties:
+ minimum_signatures:
+ type: integer
+ minimum: 1
+ default: 1
+ trusted_key_fingerprints:
+ type: array
+ items:
+ type: string
+ trusted_issuers:
+ type: array
+ items:
+ type: string
+ require_rekor:
+ type: boolean
+ default: false
+ algorithms:
+ type: array
+ items:
+ type: string
+ enum: [ES256, RS256, EdDSA, ES384, RS384, PS256, PS384]
+
+ ValidityWindow:
+ type: object
+ properties:
+ not_before:
+ type: string
+ format: date-time
+ not_after:
+ type: string
+ format: date-time
+ max_attestation_age:
+ type: integer
+ description: Maximum age of attestation in seconds
+
+ CreateVerificationPolicyRequest:
+ type: object
+ required: [policy_id, version, predicate_types]
+ properties:
+ policy_id:
+ type: string
+ version:
+ type: string
+ description:
+ type: string
+ tenant_scope:
+ type: string
+ predicate_types:
+ type: array
+ items:
+ type: string
+ signer_requirements:
+ $ref: '#/components/schemas/SignerRequirements'
+ validity_window:
+ $ref: '#/components/schemas/ValidityWindow'
+ metadata:
+ type: object
+ additionalProperties: true
+
+ UpdateVerificationPolicyRequest:
+ type: object
+ properties:
+ version:
+ type: string
+ description:
+ type: string
+ predicate_types:
+ type: array
+ items:
+ type: string
+ signer_requirements:
+ $ref: '#/components/schemas/SignerRequirements'
+ validity_window:
+ $ref: '#/components/schemas/ValidityWindow'
+ metadata:
+ type: object
+ additionalProperties: true
+
+ VerificationPolicyList:
+ type: object
+ required: [items]
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/VerificationPolicy'
+ next_page_token:
+ type: string
+ total_count:
+ type: integer
+
+ # ============================================================
+ # POLICY PACK SCHEMAS
+ # ============================================================
+ PolicyPack:
+ type: object
+ required: [pack_id, name, version, status, created_at, updated_at]
+ properties:
+ pack_id:
+ type: string
+ format: uuid
+ name:
+ type: string
+ version:
+ type: string
+ description:
+ type: string
+ status:
+ type: string
+ enum: [draft, pending_review, published, archived]
+ rules:
+ type: array
+ items:
+ $ref: '#/components/schemas/PolicyRule'
+ metadata:
+ type: object
+ additionalProperties: true
+ created_at:
+ type: string
+ format: date-time
+ updated_at:
+ type: string
+ format: date-time
+ published_at:
+ type: string
+ format: date-time
+ digest:
+ type: string
+ description: Content-addressable hash of the pack
+
+ PolicyRule:
+ type: object
+ required: [rule_id, name, severity]
+ properties:
+ rule_id:
+ type: string
+ name:
+ type: string
+ description:
+ type: string
+ severity:
+ type: string
+ enum: [critical, high, medium, low, info]
+ rego:
+ type: string
+ description: OPA/Rego policy code
+ enabled:
+ type: boolean
+ default: true
+
+ CreatePolicyPackRequest:
+ type: object
+ required: [name, version]
+ properties:
+ name:
+ type: string
+ version:
+ type: string
+ description:
+ type: string
+ rules:
+ type: array
+ items:
+ $ref: '#/components/schemas/PolicyRule'
+ metadata:
+ type: object
+ additionalProperties: true
+
+ UpdatePolicyPackRequest:
+ type: object
+ properties:
+ name:
+ type: string
+ description:
+ type: string
+ rules:
+ type: array
+ items:
+ $ref: '#/components/schemas/PolicyRule'
+ metadata:
+ type: object
+ additionalProperties: true
+
+ PolicyPackList:
+ type: object
+ required: [items]
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/PolicyPack'
+ next_page_token:
+ type: string
+
+ CompilationResult:
+ type: object
+ required: [success]
+ properties:
+ success:
+ type: boolean
+ errors:
+ type: array
+ items:
+ $ref: '#/components/schemas/CompilationError'
+ warnings:
+ type: array
+ items:
+ $ref: '#/components/schemas/CompilationWarning'
+ digest:
+ type: string
+
+ CompilationError:
+ type: object
+ required: [message]
+ properties:
+ rule_id:
+ type: string
+ line:
+ type: integer
+ column:
+ type: integer
+ message:
+ type: string
+
+ CompilationWarning:
+ type: object
+ required: [message]
+ properties:
+ rule_id:
+ type: string
+ message:
+ type: string
+
+ SimulationRequest:
+ type: object
+ required: [input]
+ properties:
+ input:
+ type: object
+ additionalProperties: true
+ description: Input data to simulate against
+ options:
+ type: object
+ properties:
+ trace:
+ type: boolean
+ default: false
+ explain:
+ type: boolean
+ default: false
+
+ SimulationResult:
+ type: object
+ required: [result]
+ properties:
+ result:
+ type: object
+ additionalProperties: true
+ violations:
+ type: array
+ items:
+ $ref: '#/components/schemas/SimulatedViolation'
+ trace:
+ type: array
+ items:
+ type: string
+ explain:
+ $ref: '#/components/schemas/PolicyExplainTrace'
+
+ SimulatedViolation:
+ type: object
+ required: [rule_id, severity, message]
+ properties:
+ rule_id:
+ type: string
+ severity:
+ type: string
+ message:
+ type: string
+ context:
+ type: object
+ additionalProperties: true
+
+ PolicyExplainTrace:
+ type: object
+ properties:
+ steps:
+ type: array
+ items:
+ type: object
+
+ PublishRequest:
+ type: object
+ properties:
+ approval_id:
+ type: string
+ description: Optional approval reference
+
+ PromoteRequest:
+ type: object
+ properties:
+ target_environment:
+ type: string
+ enum: [staging, production]
+ approval_id:
+ type: string
+
+ # ============================================================
+ # SNAPSHOT SCHEMAS
+ # ============================================================
+ Snapshot:
+ type: object
+ required: [snapshot_id, digest, created_at]
+ properties:
+ snapshot_id:
+ type: string
+ format: uuid
+ digest:
+ type: string
+ pattern: '^sha256:[a-f0-9]{64}$'
+ description:
+ type: string
+ pack_ids:
+ type: array
+ items:
+ type: string
+ format: uuid
+ metadata:
+ type: object
+ additionalProperties: true
+ created_at:
+ type: string
+ format: date-time
+ created_by:
+ type: string
+
+ CreateSnapshotRequest:
+ type: object
+ required: [pack_ids]
+ properties:
+ description:
+ type: string
+ pack_ids:
+ type: array
+ items:
+ type: string
+ format: uuid
+ metadata:
+ type: object
+ additionalProperties: true
+
+ SnapshotList:
+ type: object
+ required: [items]
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/Snapshot'
+ next_page_token:
+ type: string
+
+ # ============================================================
+ # VIOLATION SCHEMAS
+ # ============================================================
+ Violation:
+ type: object
+ required: [violation_id, rule_id, severity, message, created_at]
+ properties:
+ violation_id:
+ type: string
+ format: uuid
+ policy_id:
+ type: string
+ rule_id:
+ type: string
+ severity:
+ type: string
+ enum: [critical, high, medium, low, info]
+ message:
+ type: string
+ purl:
+ type: string
+ cve_id:
+ type: string
+ context:
+ type: object
+ additionalProperties: true
+ created_at:
+ type: string
+ format: date-time
+
+ CreateViolationRequest:
+ type: object
+ required: [rule_id, severity, message]
+ properties:
+ policy_id:
+ type: string
+ rule_id:
+ type: string
+ severity:
+ type: string
+ enum: [critical, high, medium, low, info]
+ message:
+ type: string
+ purl:
+ type: string
+ cve_id:
+ type: string
+ context:
+ type: object
+ additionalProperties: true
+
+ ViolationBatchRequest:
+ type: object
+ required: [violations]
+ properties:
+ violations:
+ type: array
+ items:
+ $ref: '#/components/schemas/CreateViolationRequest'
+ maxItems: 1000
+
+ ViolationBatchResult:
+ type: object
+ required: [created, failed]
+ properties:
+ created:
+ type: integer
+ failed:
+ type: integer
+ errors:
+ type: array
+ items:
+ type: object
+ properties:
+ index:
+ type: integer
+ error:
+ type: string
+
+ ViolationList:
+ type: object
+ required: [items]
+ properties:
+ items:
+ type: array
+ items:
+ $ref: '#/components/schemas/Violation'
+ next_page_token:
+ type: string
+ total_count:
+ type: integer
+
+ # ============================================================
+ # OVERRIDE SCHEMAS
+ # ============================================================
+ Override:
+ type: object
+ required: [override_id, rule_id, status, created_at]
+ properties:
+ override_id:
+ type: string
+ format: uuid
+ profile_id:
+ type: string
+ format: uuid
+ rule_id:
+ type: string
+ status:
+ type: string
+ enum: [pending, approved, disabled, expired]
+ reason:
+ type: string
+ scope:
+ $ref: '#/components/schemas/OverrideScope'
+ expires_at:
+ type: string
+ format: date-time
+ approved_by:
+ type: string
+ approved_at:
+ type: string
+ format: date-time
+ created_at:
+ type: string
+ format: date-time
+ created_by:
+ type: string
+
+ OverrideScope:
+ type: object
+ properties:
+ purl:
+ type: string
+ cve_id:
+ type: string
+ component:
+ type: string
+ environment:
+ type: string
+
+ CreateOverrideRequest:
+ type: object
+ required: [rule_id, reason]
+ properties:
+ profile_id:
+ type: string
+ format: uuid
+ rule_id:
+ type: string
+ reason:
+ type: string
+ scope:
+ $ref: '#/components/schemas/OverrideScope'
+ expires_at:
+ type: string
+ format: date-time
+
+ ApproveOverrideRequest:
+ type: object
+ properties:
+ comment:
+ type: string
+
+ # ============================================================
+ # SEALED MODE SCHEMAS
+ # ============================================================
+ SealedModeStatus:
+ type: object
+ required: [sealed, mode]
+ properties:
+ sealed:
+ type: boolean
+ mode:
+ type: string
+ enum: [online, sealed, transitioning]
+ sealed_at:
+ type: string
+ format: date-time
+ sealed_by:
+ type: string
+ bundle_version:
+ type: string
+ last_advisory_update:
+ type: string
+ format: date-time
+ time_anchor:
+ $ref: '#/components/schemas/TimeAnchor'
+
+ TimeAnchor:
+ type: object
+ required: [timestamp, valid]
+ properties:
+ timestamp:
+ type: string
+ format: date-time
+ signature:
+ type: string
+ valid:
+ type: boolean
+ expires_at:
+ type: string
+ format: date-time
+
+ SealRequest:
+ type: object
+ properties:
+ reason:
+ type: string
+ time_anchor:
+ type: string
+ format: date-time
+
+ UnsealRequest:
+ type: object
+ required: [reason]
+ properties:
+ reason:
+ type: string
+ audit_note:
+ type: string
+
+ VerifyBundleRequest:
+ type: object
+ required: [bundle_digest]
+ properties:
+ bundle_digest:
+ type: string
+ public_key:
+ type: string
+
+ BundleVerificationResult:
+ type: object
+ required: [valid]
+ properties:
+ valid:
+ type: boolean
+ bundle_digest:
+ type: string
+ signed_at:
+ type: string
+ format: date-time
+ signer_fingerprint:
+ type: string
+ errors:
+ type: array
+ items:
+ type: string
+
+ # ============================================================
+ # STALENESS SCHEMAS
+ # ============================================================
+ StalenessStatus:
+ type: object
+ required: [overall_status, sources]
+ properties:
+ overall_status:
+ type: string
+ enum: [fresh, stale, critical, unknown]
+ sources:
+ type: array
+ items:
+ $ref: '#/components/schemas/SourceStaleness'
+ last_check:
+ type: string
+ format: date-time
+
+ SourceStaleness:
+ type: object
+ required: [source_id, status, last_update]
+ properties:
+ source_id:
+ type: string
+ source_name:
+ type: string
+ status:
+ type: string
+ enum: [fresh, stale, critical, unknown]
+ last_update:
+ type: string
+ format: date-time
+ max_age_hours:
+ type: integer
+ age_hours:
+ type: number
+
+ EvaluateStalenessRequest:
+ type: object
+ required: [source_id]
+ properties:
+ source_id:
+ type: string
+ threshold_hours:
+ type: integer
+
+ StalenessEvaluation:
+ type: object
+ required: [source_id, is_stale]
+ properties:
+ source_id:
+ type: string
+ is_stale:
+ type: boolean
+ age_hours:
+ type: number
+ threshold_hours:
+ type: integer
+ recommendation:
+ type: string
+
+ # ============================================================
+ # COMMON SCHEMAS
+ # ============================================================
+ ProblemDetails:
+ type: object
+ required: [type, title, status]
+ properties:
+ type:
+ type: string
+ format: uri
+ title:
+ type: string
+ status:
+ type: integer
+ detail:
+ type: string
+ instance:
+ type: string
+ errors:
+ type: array
+ items:
+ type: object
+ properties:
+ field:
+ type: string
+ message:
+ type: string
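
A minimal client sketch for the staleness evaluation endpoint above, in C# with System.Net.Http.Json. The payload and response fields mirror EvaluateStalenessRequest and StalenessEvaluation; the base address and the route string are illustrative placeholders (this hunk only shows the operation body), and the DTO below is hand-rolled rather than generated from the spec.

using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json.Serialization;

var client = new HttpClient { BaseAddress = new Uri("https://policy.stellaops.example.com") };

// Route is a placeholder; substitute the real path from the spec.
using var request = new HttpRequestMessage(HttpMethod.Post, "/v1/staleness/evaluate")
{
    Content = JsonContent.Create(new { source_id = "nvd", threshold_hours = 24 })
};
request.Headers.Add("X-Tenant-Id", "00000000-0000-0000-0000-000000000001");

using var response = await client.SendAsync(request);
response.EnsureSuccessStatusCode();

var eval = await response.Content.ReadFromJsonAsync<StalenessEvaluationDto>();
Console.WriteLine($"{eval!.SourceId}: stale={eval.IsStale} ({eval.AgeHours}h)");

// Hand-rolled mirror of the StalenessEvaluation schema (required: source_id, is_stale).
sealed record StalenessEvaluationDto(
    [property: JsonPropertyName("source_id")] string SourceId,
    [property: JsonPropertyName("is_stale")] bool IsStale,
    [property: JsonPropertyName("age_hours")] double AgeHours,
    [property: JsonPropertyName("recommendation")] string? Recommendation = null);
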
diff --git a/docs/schemas/reachability-input.schema.json b/docs/schemas/reachability-input.schema.json
new file mode 100644
index 000000000..a3f15c788
--- /dev/null
+++ b/docs/schemas/reachability-input.schema.json
@@ -0,0 +1,564 @@
+{
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
+ "$id": "https://stella-ops.org/schemas/reachability-input.schema.json",
+ "title": "StellaOps Reachability Input Schema",
+ "description": "Schema for reachability/exploitability signals input to Policy Engine. Unblocks POLICY-ENGINE-80-001, POLICY-RISK-66-003.",
+ "type": "object",
+ "definitions": {
+ "ReachabilityInput": {
+ "type": "object",
+ "description": "Input payload for policy engine reachability evaluation",
+ "required": ["subject", "reachability_facts", "timestamp"],
+ "properties": {
+ "subject": {
+ "$ref": "#/definitions/Subject"
+ },
+ "reachability_facts": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ReachabilityFact"
+ }
+ },
+ "exploitability_facts": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ExploitabilityFact"
+ }
+ },
+ "callgraph_refs": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/CallgraphRef"
+ }
+ },
+ "runtime_facts": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/RuntimeFact"
+ }
+ },
+ "entropy_score": {
+ "$ref": "#/definitions/EntropyScore"
+ },
+ "timestamp": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "metadata": {
+ "type": "object",
+ "additionalProperties": true
+ }
+ }
+ },
+ "Subject": {
+ "type": "object",
+ "description": "Subject being evaluated (component + vulnerability)",
+ "required": ["purl"],
+ "properties": {
+ "purl": {
+ "type": "string",
+ "description": "Package URL of the component"
+ },
+ "cve_id": {
+ "type": "string",
+ "pattern": "^CVE-[0-9]{4}-[0-9]+$"
+ },
+ "ghsa_id": {
+ "type": "string",
+ "pattern": "^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$"
+ },
+ "vulnerability_id": {
+ "type": "string",
+ "description": "Internal vulnerability identifier"
+ },
+ "affected_symbols": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "description": "Vulnerable symbols/functions in the component"
+ },
+ "version_range": {
+ "type": "string",
+ "description": "Affected version range (e.g., '<1.2.3')"
+ }
+ }
+ },
+ "ReachabilityFact": {
+ "type": "object",
+ "description": "Static reachability analysis result",
+ "required": ["state", "confidence"],
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["reachable", "unreachable", "potentially_reachable", "unknown"],
+ "description": "Reachability state"
+ },
+ "confidence": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "description": "Confidence score (0-1)"
+ },
+ "source": {
+ "type": "string",
+ "enum": ["static_analysis", "dynamic_analysis", "sbom_inference", "manual", "external"],
+ "description": "Source of the reachability determination"
+ },
+ "analyzer": {
+ "type": "string",
+ "description": "Analyzer tool that produced this fact"
+ },
+ "analyzer_version": {
+ "type": "string"
+ },
+ "call_path": {
+ "$ref": "#/definitions/CallPath"
+ },
+ "entry_points": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/EntryPoint"
+ }
+ },
+ "evidence": {
+ "$ref": "#/definitions/ReachabilityEvidence"
+ },
+ "evaluated_at": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ },
+ "CallPath": {
+ "type": "object",
+ "description": "Call path from entry point to vulnerable symbol",
+ "properties": {
+ "depth": {
+ "type": "integer",
+ "minimum": 0,
+ "description": "Call depth from entry point"
+ },
+ "nodes": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/CallNode"
+ }
+ },
+ "edges": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/CallEdge"
+ }
+ }
+ }
+ },
+ "CallNode": {
+ "type": "object",
+ "required": ["id", "symbol"],
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "symbol": {
+ "type": "string",
+ "description": "Fully qualified symbol name"
+ },
+ "file": {
+ "type": "string"
+ },
+ "line": {
+ "type": "integer"
+ },
+ "package": {
+ "type": "string"
+ },
+ "is_vulnerable": {
+ "type": "boolean"
+ },
+ "is_entry_point": {
+ "type": "boolean"
+ }
+ }
+ },
+ "CallEdge": {
+ "type": "object",
+ "required": ["source", "target"],
+ "properties": {
+ "source": {
+ "type": "string"
+ },
+ "target": {
+ "type": "string"
+ },
+ "call_type": {
+ "type": "string",
+ "enum": ["direct", "indirect", "virtual", "reflection", "dynamic"]
+ }
+ }
+ },
+ "EntryPoint": {
+ "type": "object",
+ "description": "Application entry point that can reach vulnerable code",
+ "required": ["type", "identifier"],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["http_endpoint", "grpc_method", "cli_command", "event_handler", "scheduled_job", "main", "test"]
+ },
+ "identifier": {
+ "type": "string",
+ "description": "Entry point identifier (e.g., 'POST /api/users')"
+ },
+ "file": {
+ "type": "string"
+ },
+ "line": {
+ "type": "integer"
+ },
+ "exposed": {
+ "type": "boolean",
+ "default": true,
+ "description": "Whether this entry point is externally exposed"
+ },
+ "authentication_required": {
+ "type": "boolean"
+ }
+ }
+ },
+ "ReachabilityEvidence": {
+ "type": "object",
+ "description": "Supporting evidence for reachability determination",
+ "properties": {
+ "digest": {
+ "type": "string",
+ "pattern": "^sha256:[a-f0-9]{64}$"
+ },
+ "evidence_uri": {
+ "type": "string",
+ "format": "uri"
+ },
+ "callgraph_digest": {
+ "type": "string"
+ },
+ "sbom_digest": {
+ "type": "string"
+ },
+ "analysis_log_uri": {
+ "type": "string",
+ "format": "uri"
+ }
+ }
+ },
+ "ExploitabilityFact": {
+ "type": "object",
+ "description": "Exploitability assessment",
+ "required": ["state", "confidence"],
+ "properties": {
+ "state": {
+ "type": "string",
+ "enum": ["exploitable", "not_exploitable", "conditionally_exploitable", "unknown"]
+ },
+ "confidence": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "source": {
+ "type": "string",
+ "enum": ["kev", "epss", "vendor_advisory", "internal_analysis", "exploit_db"]
+ },
+ "epss_score": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "description": "EPSS probability score"
+ },
+ "epss_percentile": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 100
+ },
+ "kev_listed": {
+ "type": "boolean",
+ "description": "Listed in CISA Known Exploited Vulnerabilities"
+ },
+ "kev_due_date": {
+ "type": "string",
+ "format": "date"
+ },
+ "exploit_maturity": {
+ "type": "string",
+ "enum": ["not_defined", "unproven", "poc", "functional", "high"],
+ "description": "Exploit maturity level (per CVSS)"
+ },
+ "exploit_refs": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ "format": "uri"
+ }
+ },
+ "conditions": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ExploitCondition"
+ },
+ "description": "Conditions required for exploitation"
+ },
+ "evaluated_at": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ },
+ "ExploitCondition": {
+ "type": "object",
+ "description": "Condition required for exploitation",
+ "required": ["condition", "met"],
+ "properties": {
+ "condition": {
+ "type": "string",
+ "description": "Description of the condition"
+ },
+ "met": {
+ "type": "boolean"
+ },
+ "confidence": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "evidence": {
+ "type": "string"
+ }
+ }
+ },
+ "CallgraphRef": {
+ "type": "object",
+ "description": "Reference to a stored callgraph",
+ "required": ["digest"],
+ "properties": {
+ "digest": {
+ "type": "string",
+ "pattern": "^sha256:[a-f0-9]{64}$"
+ },
+ "format": {
+ "type": "string",
+ "enum": ["richgraph-v1", "dot", "json-graph", "sarif"],
+ "default": "richgraph-v1"
+ },
+ "uri": {
+ "type": "string",
+ "format": "uri"
+ },
+ "generated_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "generator": {
+ "type": "string"
+ },
+ "generator_version": {
+ "type": "string"
+ }
+ }
+ },
+ "RuntimeFact": {
+ "type": "object",
+ "description": "Runtime observation fact",
+ "required": ["type", "observed_at"],
+ "properties": {
+ "type": {
+ "type": "string",
+ "enum": ["function_called", "function_not_called", "path_executed", "path_not_executed", "module_loaded", "module_not_loaded"]
+ },
+ "symbol": {
+ "type": "string"
+ },
+ "module": {
+ "type": "string"
+ },
+ "call_count": {
+ "type": "integer",
+ "minimum": 0
+ },
+ "last_called": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "observed_at": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "observation_window": {
+ "type": "string",
+ "description": "Duration of observation (e.g., '7d', '30d')"
+ },
+ "environment": {
+ "type": "string",
+ "enum": ["production", "staging", "development", "test"]
+ }
+ }
+ },
+ "EntropyScore": {
+ "type": "object",
+ "description": "Scanner entropy/trust score for confidence weighting",
+ "properties": {
+ "overall": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "description": "Overall trust score"
+ },
+ "sbom_completeness": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "callgraph_coverage": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "runtime_coverage": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "analyzer_confidence": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1
+ },
+ "data_freshness": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 1,
+ "description": "How recent the underlying data is"
+ }
+ }
+ },
+ "ReachabilityOutput": {
+ "type": "object",
+ "description": "Policy engine output after reachability evaluation",
+ "required": ["subject", "effective_state", "risk_adjustment"],
+ "properties": {
+ "subject": {
+ "$ref": "#/definitions/Subject"
+ },
+ "effective_state": {
+ "type": "string",
+ "enum": ["reachable", "unreachable", "potentially_reachable", "unknown"]
+ },
+ "effective_exploitability": {
+ "type": "string",
+ "enum": ["exploitable", "not_exploitable", "conditionally_exploitable", "unknown"]
+ },
+ "risk_adjustment": {
+ "type": "object",
+ "properties": {
+ "factor": {
+ "type": "number",
+ "minimum": 0,
+ "maximum": 2,
+ "description": "Risk multiplier (0 = suppress, 1 = neutral, >1 = amplify)"
+ },
+ "severity_override": {
+ "type": "string",
+ "enum": ["critical", "high", "medium", "low", "info"]
+ },
+ "justification": {
+ "type": "string"
+ }
+ }
+ },
+ "policy_trace": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "rule_id": { "type": "string" },
+ "result": { "type": "string" },
+ "reason": { "type": "string" }
+ }
+ }
+ },
+ "evaluated_at": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ }
+ },
+ "properties": {
+ "inputs": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/ReachabilityInput"
+ }
+ }
+ },
+ "examples": [
+ {
+ "inputs": [
+ {
+ "subject": {
+ "purl": "pkg:npm/lodash@4.17.20",
+ "cve_id": "CVE-2021-23337",
+ "affected_symbols": ["lodash.template"]
+ },
+ "reachability_facts": [
+ {
+ "state": "reachable",
+ "confidence": 0.95,
+ "source": "static_analysis",
+ "analyzer": "stellaops-scanner",
+ "analyzer_version": "2025.10.0",
+ "call_path": {
+ "depth": 3,
+ "nodes": [
+ { "id": "n1", "symbol": "app.renderTemplate", "is_entry_point": true },
+ { "id": "n2", "symbol": "templateEngine.compile" },
+ { "id": "n3", "symbol": "lodash.template", "is_vulnerable": true }
+ ],
+ "edges": [
+ { "source": "n1", "target": "n2", "call_type": "direct" },
+ { "source": "n2", "target": "n3", "call_type": "direct" }
+ ]
+ },
+ "entry_points": [
+ {
+ "type": "http_endpoint",
+ "identifier": "POST /api/render",
+ "exposed": true,
+ "authentication_required": true
+ }
+ ],
+ "evaluated_at": "2025-12-06T10:00:00Z"
+ }
+ ],
+ "exploitability_facts": [
+ {
+ "state": "exploitable",
+ "confidence": 0.8,
+ "source": "epss",
+ "epss_score": 0.42,
+ "epss_percentile": 87,
+ "kev_listed": false,
+ "exploit_maturity": "functional",
+ "evaluated_at": "2025-12-06T10:00:00Z"
+ }
+ ],
+ "entropy_score": {
+ "overall": 0.85,
+ "sbom_completeness": 0.95,
+ "callgraph_coverage": 0.78,
+ "analyzer_confidence": 0.9
+ },
+ "timestamp": "2025-12-06T10:00:00Z"
+ }
+ ]
+ }
+ ]
+}
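
To make the fact model concrete: a short C# sketch that reads a reachability_facts array shaped like the example above and reduces it to an effective state, taking the highest-confidence fact and defaulting to "unknown". The record type is a hand-rolled stand-in for illustration, not a published StellaOps contract.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;

var json = """[{ "state": "reachable", "confidence": 0.95, "source": "static_analysis" }]""";
var facts = JsonSerializer.Deserialize<List<ReachabilityFact>>(json)!;
Console.WriteLine(EffectiveState.Reduce(facts)); // prints "reachable"

// Stand-in mirroring the schema's ReachabilityFact (required: state, confidence).
sealed record ReachabilityFact(
    [property: JsonPropertyName("state")] string State,
    [property: JsonPropertyName("confidence")] double Confidence,
    [property: JsonPropertyName("source")] string? Source = null);

static class EffectiveState
{
    // Highest-confidence fact wins; an empty fact list maps to "unknown".
    public static string Reduce(IReadOnlyList<ReachabilityFact> facts) =>
        facts.Count == 0 ? "unknown" : facts.MaxBy(f => f.Confidence)!.State;
}
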
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Bun/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Bun/GlobalUsings.cs
new file mode 100644
index 000000000..0f4b1c33f
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Bun/GlobalUsings.cs
@@ -0,0 +1,6 @@
+global using System;
+global using System.Collections.Generic;
+global using System.IO;
+global using System.Linq;
+global using System.Threading;
+global using System.Threading.Tasks;
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Models/SurfaceAnalysisResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Models/SurfaceAnalysisResult.cs
deleted file mode 100644
index 175934f8a..000000000
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Models/SurfaceAnalysisResult.cs
+++ /dev/null
@@ -1,171 +0,0 @@
-using StellaOps.Scanner.Surface.Discovery;
-
-namespace StellaOps.Scanner.Surface.Models;
-
-/// <summary>
-/// Complete result of surface analysis for a scan.
-/// </summary>
-public sealed record SurfaceAnalysisResult
-{
- /// <summary>
- /// Scan identifier.
- /// </summary>
- public required string ScanId { get; init; }
-
- /// <summary>
- /// When analysis was performed.
- /// </summary>
- public required DateTimeOffset Timestamp { get; init; }
-
- /// <summary>
- /// Analysis summary statistics.
- /// </summary>
- public required SurfaceAnalysisSummary Summary { get; init; }
-
- /// <summary>
- /// Discovered surface entries.
- /// </summary>
- public required IReadOnlyList<SurfaceEntry> Entries { get; init; }
-
- /// <summary>
- /// Discovered entry points.
- /// </summary>
- public IReadOnlyList<EntryPoint>? EntryPoints { get; init; }
-
- /// <summary>
- /// Analysis metadata.
- /// </summary>
- public SurfaceAnalysisMetadata? Metadata { get; init; }
-}
-
-/// <summary>
-/// Summary statistics for surface analysis.
-/// </summary>
-public sealed record SurfaceAnalysisSummary
-{
- /// <summary>
- /// Total number of surface entries.
- /// </summary>
- public required int TotalEntries { get; init; }
-
- /// <summary>
- /// Entry counts by type.
- /// </summary>
- public required IReadOnlyDictionary<SurfaceType, int> ByType { get; init; }
-
- /// <summary>
- /// Entry counts by confidence level.
- /// </summary>
- public required IReadOnlyDictionary<ConfidenceLevel, int> ByConfidence { get; init; }
-
- /// <summary>
- /// Calculated risk score (0.0 - 1.0).
- /// </summary>
- public required double RiskScore { get; init; }
-
- /// <summary>
- /// High-risk entry count.
- /// </summary>
- public int HighRiskCount { get; init; }
-
- /// <summary>
- /// Total entry points discovered.
- /// </summary>
- public int? EntryPointCount { get; init; }
-
- /// <summary>
- /// Creates summary from entries.
- /// </summary>
- public static SurfaceAnalysisSummary FromEntries(IReadOnlyList<SurfaceEntry> entries)
- {
- var byType = entries
- .GroupBy(e => e.Type)
- .ToDictionary(g => g.Key, g => g.Count());
-
- var byConfidence = entries
- .GroupBy(e => e.Confidence)
- .ToDictionary(g => g.Key, g => g.Count());
-
- // Calculate risk score based on entry types and confidence
- var riskScore = CalculateRiskScore(entries);
-
- var highRiskCount = entries.Count(e =>
- e.Type is SurfaceType.ProcessExecution or SurfaceType.CryptoOperation or SurfaceType.SecretAccess ||
- e.Confidence == ConfidenceLevel.Verified);
-
- return new SurfaceAnalysisSummary
- {
- TotalEntries = entries.Count,
- ByType = byType,
- ByConfidence = byConfidence,
- RiskScore = riskScore,
- HighRiskCount = highRiskCount
- };
- }
-
- private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries)
- {
- if (entries.Count == 0) return 0.0;
-
- var typeWeights = new Dictionary<SurfaceType, double>
- {
- [SurfaceType.ProcessExecution] = 1.0,
- [SurfaceType.SecretAccess] = 0.9,
- [SurfaceType.CryptoOperation] = 0.8,
- [SurfaceType.DatabaseOperation] = 0.7,
- [SurfaceType.Deserialization] = 0.85,
- [SurfaceType.DynamicCode] = 0.9,
- [SurfaceType.AuthenticationPoint] = 0.6,
- [SurfaceType.NetworkEndpoint] = 0.5,
- [SurfaceType.InputHandling] = 0.5,
- [SurfaceType.ExternalCall] = 0.4,
- [SurfaceType.FileOperation] = 0.3
- };
-
- var confidenceMultipliers = new Dictionary<ConfidenceLevel, double>
- {
- [ConfidenceLevel.Low] = 0.5,
- [ConfidenceLevel.Medium] = 0.75,
- [ConfidenceLevel.High] = 1.0,
- [ConfidenceLevel.Verified] = 1.0
- };
-
- var totalWeight = entries.Sum(e =>
- typeWeights.GetValueOrDefault(e.Type, 0.3) *
- confidenceMultipliers.GetValueOrDefault(e.Confidence, 0.5));
-
- // Normalize to 0-1 range (cap at 100 weighted entries)
- return Math.Min(1.0, totalWeight / 100.0);
- }
-}
-
-/// <summary>
-/// Metadata about the surface analysis execution.
-/// </summary>
-public sealed record SurfaceAnalysisMetadata
-{
- /// <summary>
- /// Analysis duration in milliseconds.
- /// </summary>
- public double DurationMs { get; init; }
-
- /// <summary>
- /// Files analyzed count.
- /// </summary>
- public int FilesAnalyzed { get; init; }
-
- /// <summary>
- /// Languages detected.
- /// </summary>
- public IReadOnlyList<string>? Languages { get; init; }
-
- /// <summary>
- /// Frameworks detected.
- /// </summary>
- public IReadOnlyList<string>? Frameworks { get; init; }
-
- /// <summary>
- /// Analysis configuration used.
- /// </summary>
- public SurfaceAnalysisOptions? Options { get; init; }
-}
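
The scorer removed here normalized the confidence-weighted sum against a 100-unit cap; a quick worked instance of that formula:

// 10 ProcessExecution entries (type weight 1.0) at Verified confidence (multiplier 1.0):
// totalWeight = 10 * 1.0 * 1.0 = 10.0, so RiskScore = Math.Min(1.0, 10.0 / 100.0) = 0.10.
var score = Math.Min(1.0, (10 * 1.0 * 1.0) / 100.0);
Console.WriteLine(score); // 0.1
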
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/ISurfaceAnalysisWriter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/ISurfaceAnalysisWriter.cs
deleted file mode 100644
index 1ab713525..000000000
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/ISurfaceAnalysisWriter.cs
+++ /dev/null
@@ -1,121 +0,0 @@
-using System.Text.Json;
-using System.Text.Json.Serialization;
-using Microsoft.Extensions.Logging;
-using StellaOps.Scanner.Surface.Models;
-
-namespace StellaOps.Scanner.Surface.Output;
-
-/// <summary>
-/// Interface for writing surface analysis results.
-/// </summary>
-public interface ISurfaceAnalysisWriter
-{
- /// <summary>
- /// Writes analysis result to the specified stream.
- /// </summary>
- Task WriteAsync(
- SurfaceAnalysisResult result,
- Stream outputStream,
- CancellationToken cancellationToken = default);
-
- /// <summary>
- /// Serializes analysis result to JSON string.
- /// </summary>
- string Serialize(SurfaceAnalysisResult result);
-}
-
-/// <summary>
-/// Store key for surface analysis results.
-/// </summary>
-public static class SurfaceAnalysisStoreKeys
-{
- /// <summary>
- /// Key for storing surface analysis in scan artifacts.
- /// </summary>
- public const string SurfaceAnalysis = "scanner.surface.analysis";
-
- /// <summary>
- /// Key for storing surface entries.
- /// </summary>
- public const string SurfaceEntries = "scanner.surface.entries";
-
- /// <summary>
- /// Key for storing entry points.
- /// </summary>
- public const string EntryPoints = "scanner.surface.entrypoints";
-}
-
-/// <summary>
-/// Default implementation of surface analysis writer.
-/// Uses deterministic JSON serialization.
-/// </summary>
-public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
-{
- private readonly ILogger<SurfaceAnalysisWriter> _logger;
-
- private static readonly JsonSerializerOptions JsonOptions = new()
- {
- PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
- WriteIndented = false,
- DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
- Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
- };
-
- private static readonly JsonSerializerOptions PrettyJsonOptions = new()
- {
- PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
- WriteIndented = true,
- DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
- Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
- };
-
- public SurfaceAnalysisWriter(ILogger<SurfaceAnalysisWriter> logger)
- {
- _logger = logger;
- }
-
- public async Task WriteAsync(
- SurfaceAnalysisResult result,
- Stream outputStream,
- CancellationToken cancellationToken = default)
- {
- // Sort entries by ID for determinism
- var sortedResult = SortResult(result);
-
- await JsonSerializer.SerializeAsync(
- outputStream,
- sortedResult,
- JsonOptions,
- cancellationToken);
-
- _logger.LogDebug(
- "Wrote surface analysis for scan {ScanId} with {EntryCount} entries",
- result.ScanId,
- result.Entries.Count);
- }
-
- public string Serialize(SurfaceAnalysisResult result)
- {
- var sortedResult = SortResult(result);
- return JsonSerializer.Serialize(sortedResult, PrettyJsonOptions);
- }
-
- private static SurfaceAnalysisResult SortResult(SurfaceAnalysisResult result)
- {
- // Sort entries by ID for deterministic output
- var sortedEntries = result.Entries
- .OrderBy(e => e.Id)
- .ToList();
-
- // Sort entry points by ID if present
- var sortedEntryPoints = result.EntryPoints?
- .OrderBy(ep => ep.Id)
- .ToList();
-
- return result with
- {
- Entries = sortedEntries,
- EntryPoints = sortedEntryPoints
- };
- }
-}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/SurfaceAnalysisWriter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/SurfaceAnalysisWriter.cs
index 357480102..5be20bd94 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/SurfaceAnalysisWriter.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Output/SurfaceAnalysisWriter.cs
@@ -80,7 +80,7 @@ public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
var jsonOptions = options.PrettyPrint ? s_prettyJsonOptions : s_jsonOptions;
- if (options.WriteToFile && \!string.IsNullOrEmpty(options.OutputPath))
+ if (options.WriteToFile && !string.IsNullOrEmpty(options.OutputPath))
{
var filePath = Path.Combine(options.OutputPath, $"surface-{result.ScanId}.json");
await using var stream = File.Create(filePath);
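
The corrected condition (the stray `\!` escape becomes a plain `!`) gates file output on WriteToFile plus a non-empty OutputPath. An illustrative options value exercising that branch, using only the members this hunk references:

var options = new SurfaceOutputOptions
{
    WriteToFile = true,
    OutputPath = "/tmp/surface", // output lands at /tmp/surface/surface-{ScanId}.json
    PrettyPrint = false          // compact JSON via s_jsonOptions
};
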
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/ServiceCollectionExtensions.cs
deleted file mode 100644
index f7de70780..000000000
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/ServiceCollectionExtensions.cs
+++ /dev/null
@@ -1,153 +0,0 @@
-using Microsoft.Extensions.Configuration;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.DependencyInjection.Extensions;
-using StellaOps.Scanner.Surface.Discovery;
-using StellaOps.Scanner.Surface.Output;
-using StellaOps.Scanner.Surface.Signals;
-
-namespace StellaOps.Scanner.Surface;
-
-/// <summary>
-/// Extension methods for registering surface analysis services.
-/// </summary>
-public static class ServiceCollectionExtensions
-{
- /// <summary>
- /// Adds surface analysis services to the service collection.
- /// </summary>
- public static IServiceCollection AddSurfaceAnalysis(
- this IServiceCollection services,
- IConfiguration? configuration = null)
- {
- // Core services
- services.TryAddSingleton<ISurfaceCollectorRegistry, SurfaceCollectorRegistry>();
- services.TryAddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
- services.TryAddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
- services.TryAddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();
-
- // Configure options if configuration provided
- if (configuration != null)
- {
- services.Configure<SurfaceAnalysisOptions>(
- configuration.GetSection("Scanner:Surface"));
- }
-
- return services;
- }
-
- /// <summary>
- /// Adds surface analysis services with a signal sink.
- /// </summary>
- public static IServiceCollection AddSurfaceAnalysis<TSignalSink>(
- this IServiceCollection services,
- IConfiguration? configuration = null)
- where TSignalSink : class, ISurfaceSignalSink
- {
- services.AddSurfaceAnalysis(configuration);
- services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
- return services;
- }
-
- /// <summary>
- /// Adds surface analysis services with in-memory signal sink for testing.
- /// </summary>
- public static IServiceCollection AddSurfaceAnalysisForTesting(this IServiceCollection services)
- {
- services.AddSurfaceAnalysis();
- services.TryAddSingleton<ISurfaceSignalSink, InMemorySurfaceSignalSink>();
- return services;
- }
-
- /// <summary>
- /// Registers a surface entry collector.
- /// </summary>
- public static IServiceCollection AddSurfaceCollector<TCollector>(this IServiceCollection services)
- where TCollector : class, ISurfaceEntryCollector
- {
- services.AddSingleton<ISurfaceEntryCollector, TCollector>();
- return services;
- }
-
- /// <summary>
- /// Registers multiple surface entry collectors.
- /// </summary>
- public static IServiceCollection AddSurfaceCollectors(
- this IServiceCollection services,
- params Type[] collectorTypes)
- {
- foreach (var type in collectorTypes)
- {
- if (!typeof(ISurfaceEntryCollector).IsAssignableFrom(type))
- {
- throw new ArgumentException(
- $"Type {type.Name} does not implement ISurfaceEntryCollector",
- nameof(collectorTypes));
- }
-
- services.AddSingleton(typeof(ISurfaceEntryCollector), type);
- }
-
- return services;
- }
-}
-
-/// <summary>
-/// Builder for configuring surface analysis.
-/// </summary>
-public sealed class SurfaceAnalysisBuilder
-{
- private readonly IServiceCollection _services;
-
- internal SurfaceAnalysisBuilder(IServiceCollection services)
- {
- _services = services;
- }
-
- /// <summary>
- /// Registers a collector.
- /// </summary>
- public SurfaceAnalysisBuilder AddCollector<TCollector>()
- where TCollector : class, ISurfaceEntryCollector
- {
- _services.AddSurfaceCollector<TCollector>();
- return this;
- }
-
- /// <summary>
- /// Configures a custom signal sink.
- /// </summary>
- public SurfaceAnalysisBuilder UseSignalSink<TSignalSink>()
- where TSignalSink : class, ISurfaceSignalSink
- {
- _services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
- return this;
- }
-
- /// <summary>
- /// Configures options.
- /// </summary>
- public SurfaceAnalysisBuilder Configure(Action<SurfaceAnalysisOptions> configure)
- {
- _services.Configure(configure);
- return this;
- }
-}
-
-/// <summary>
-/// Extension for fluent builder pattern.
-/// </summary>
-public static class SurfaceAnalysisBuilderExtensions
-{
- /// <summary>
- /// Adds surface analysis with fluent configuration.
- /// </summary>
- public static IServiceCollection AddSurfaceAnalysis(
- this IServiceCollection services,
- Action<SurfaceAnalysisBuilder> configure)
- {
- services.AddSurfaceAnalysis();
- var builder = new SurfaceAnalysisBuilder(services);
- configure(builder);
- return services;
- }
-}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/ISurfaceSignalEmitter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/ISurfaceSignalEmitter.cs
deleted file mode 100644
index 111006e78..000000000
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/ISurfaceSignalEmitter.cs
+++ /dev/null
@@ -1,177 +0,0 @@
-using Microsoft.Extensions.Logging;
-using StellaOps.Scanner.Surface.Models;
-
-namespace StellaOps.Scanner.Surface.Signals;
-
-/// <summary>
-/// Interface for emitting surface analysis signals for policy evaluation.
-/// </summary>
-public interface ISurfaceSignalEmitter
-{
- /// <summary>
- /// Emits signals for the given analysis result.
- /// </summary>
- Task EmitAsync(
- string scanId,
- SurfaceAnalysisResult result,
- CancellationToken cancellationToken = default);
-
- /// <summary>
- /// Emits custom signals.
- /// </summary>
- Task EmitAsync(
- string scanId,
- IDictionary<string, object> signals,
- CancellationToken cancellationToken = default);
-}
-
-/// <summary>
-/// Default implementation of surface signal emitter.
-/// Converts analysis results to policy signals.
-/// </summary>
-public sealed class SurfaceSignalEmitter : ISurfaceSignalEmitter
-{
- private readonly ILogger<SurfaceSignalEmitter> _logger;
- private readonly ISurfaceSignalSink? _sink;
-
- public SurfaceSignalEmitter(
- ILogger<SurfaceSignalEmitter> logger,
- ISurfaceSignalSink? sink = null)
- {
- _logger = logger;
- _sink = sink;
- }
-
- public async Task EmitAsync(
- string scanId,
- SurfaceAnalysisResult result,
- CancellationToken cancellationToken = default)
- {
- var signals = BuildSignals(result);
- await EmitAsync(scanId, signals, cancellationToken);
- }
-
- public async Task EmitAsync(
- string scanId,
- IDictionary<string, object> signals,
- CancellationToken cancellationToken = default)
- {
- _logger.LogDebug(
- "Emitting {SignalCount} surface signals for scan {ScanId}",
- signals.Count,
- scanId);
-
- if (_sink != null)
- {
- await _sink.WriteAsync(scanId, signals, cancellationToken);
- }
- else
- {
- _logger.LogDebug(
- "No signal sink configured, signals for scan {ScanId}: {Signals}",
- scanId,
- string.Join(", ", signals.Select(kv => $"{kv.Key}={kv.Value}")));
- }
- }
-
- private static Dictionary<string, object> BuildSignals(SurfaceAnalysisResult result)
- {
- var signals = new Dictionary<string, object>
- {
- [SurfaceSignalKeys.TotalSurfaceArea] = result.Summary.TotalEntries,
- [SurfaceSignalKeys.RiskScore] = result.Summary.RiskScore,
- [SurfaceSignalKeys.HighConfidenceCount] = result.Entries
- .Count(e => e.Confidence >= ConfidenceLevel.High)
- };
-
- // Add counts by type
- foreach (var (type, count) in result.Summary.ByType)
- {
- var key = type switch
- {
- SurfaceType.NetworkEndpoint => SurfaceSignalKeys.NetworkEndpoints,
- SurfaceType.FileOperation => SurfaceSignalKeys.FileOperations,
- SurfaceType.ProcessExecution => SurfaceSignalKeys.ProcessSpawns,
- SurfaceType.CryptoOperation => SurfaceSignalKeys.CryptoUsage,
- SurfaceType.AuthenticationPoint => SurfaceSignalKeys.AuthPoints,
- SurfaceType.InputHandling => SurfaceSignalKeys.InputHandlers,
- SurfaceType.SecretAccess => SurfaceSignalKeys.SecretAccess,
- SurfaceType.ExternalCall => SurfaceSignalKeys.ExternalCalls,
- SurfaceType.DatabaseOperation => SurfaceSignalKeys.DatabaseOperations,
- SurfaceType.Deserialization => SurfaceSignalKeys.DeserializationPoints,
- SurfaceType.DynamicCode => SurfaceSignalKeys.DynamicCodePoints,
- _ => $"{SurfaceSignalKeys.Prefix}{type.ToString().ToLowerInvariant()}"
- };
-
- signals[key] = count;
- }
-
- // Add entry point count if available
- if (result.EntryPoints is { Count: > 0 })
- {
- signals[SurfaceSignalKeys.EntryPointCount] = result.EntryPoints.Count;
- }
-
- // Add framework signals if metadata available
- if (result.Metadata?.Frameworks is { Count: > 0 } frameworks)
- {
- foreach (var framework in frameworks)
- {
- var normalizedName = framework.ToLowerInvariant().Replace(" ", "_").Replace(".", "_");
- signals[$"{SurfaceSignalKeys.FrameworkPrefix}{normalizedName}"] = true;
- }
- }
-
- // Add language signals if metadata available
- if (result.Metadata?.Languages is { Count: > 0 } languages)
- {
- foreach (var language in languages)
- {
- var normalizedName = language.ToLowerInvariant();
- signals[$"{SurfaceSignalKeys.LanguagePrefix}{normalizedName}"] = true;
- }
- }
-
- return signals;
- }
-}
-
-/// <summary>
-/// Sink for writing surface signals to storage.
-/// </summary>
-public interface ISurfaceSignalSink
-{
- /// <summary>
- /// Writes signals to storage.
- /// </summary>
- Task WriteAsync(
- string scanId,
- IDictionary<string, object> signals,
- CancellationToken cancellationToken = default);
-}
-
-/// <summary>
-/// In-memory signal sink for testing.
-/// </summary>
-public sealed class InMemorySurfaceSignalSink : ISurfaceSignalSink
-{
- private readonly Dictionary<string, IDictionary<string, object>> _signals = new();
-
- public IReadOnlyDictionary<string, IDictionary<string, object>> Signals => _signals;
-
- public Task WriteAsync(
- string scanId,
- IDictionary<string, object> signals,
- CancellationToken cancellationToken = default)
- {
- _signals[scanId] = new Dictionary<string, object>(signals);
- return Task.CompletedTask;
- }
-
- public IDictionary<string, object>? GetSignals(string scanId)
- {
- return _signals.TryGetValue(scanId, out var signals) ? signals : null;
- }
-
- public void Clear() => _signals.Clear();
-}
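
For reference, a test-style usage sketch of the in-memory sink removed above, using only members defined in this file (scan id and values illustrative):

var sink = new InMemorySurfaceSignalSink();
await sink.WriteAsync("scan-001", new Dictionary<string, object>
{
    ["surface.risk.score"] = 0.42,
    ["surface.network.endpoints"] = 3
});

var stored = sink.GetSignals("scan-001");
Console.WriteLine(stored?["surface.risk.score"]); // 0.42
sink.Clear();
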
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/SurfaceSignalKeys.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/SurfaceSignalKeys.cs
deleted file mode 100644
index 3be53877f..000000000
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/Signals/SurfaceSignalKeys.cs
+++ /dev/null
@@ -1,64 +0,0 @@
-namespace StellaOps.Scanner.Surface.Signals;
-
-///
-/// Standard signal keys for surface analysis policy integration.
-///
-public static class SurfaceSignalKeys
-{
- /// Prefix for all surface signals.
- public const string Prefix = "surface.";
-
- /// Network endpoint count.
- public const string NetworkEndpoints = "surface.network.endpoints";
-
- /// Exposed port count.
- public const string ExposedPorts = "surface.network.ports";
-
- /// File operation count.
- public const string FileOperations = "surface.file.operations";
-
- /// Process spawn count.
- public const string ProcessSpawns = "surface.process.spawns";
-
- /// Crypto operation count.
- public const string CryptoUsage = "surface.crypto.usage";
-
- /// Authentication point count.
- public const string AuthPoints = "surface.auth.points";
-
- /// Input handler count.
- public const string InputHandlers = "surface.input.handlers";
-
- /// Secret access point count.
- public const string SecretAccess = "surface.secrets.access";
-
- /// External call count.
- public const string ExternalCalls = "surface.external.calls";
-
- /// Database operation count.
- public const string DatabaseOperations = "surface.database.operations";
-
- /// Deserialization point count.
- public const string DeserializationPoints = "surface.deserialization.points";
-
- /// Dynamic code execution count.
- public const string DynamicCodePoints = "surface.dynamic.code";
-
- /// Total surface area score.
- public const string TotalSurfaceArea = "surface.total.area";
-
- /// Overall risk score (0.0-1.0).
- public const string RiskScore = "surface.risk.score";
-
- /// High-confidence entry count.
- public const string HighConfidenceCount = "surface.high_confidence.count";
-
- /// Entry point count.
- public const string EntryPointCount = "surface.entry_points.count";
-
- /// Framework-specific prefix.
- public const string FrameworkPrefix = "surface.framework.";
-
- /// Language-specific prefix.
- public const string LanguagePrefix = "surface.language.";
-}
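
A policy-side consumer keys off these constants rather than raw strings; a minimal sketch (the dictionary shape matches ISurfaceSignalSink.WriteAsync above; the budget threshold is illustrative):

// Returns true when the emitted risk score exceeds the given budget.
static bool ExceedsRiskBudget(IDictionary<string, object> signals, double budget) =>
    signals.TryGetValue(SurfaceSignalKeys.RiskScore, out var raw)
        && raw is double score
        && score > budget;

// e.g. ExceedsRiskBudget(signals, budget: 0.7)
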
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/StellaOps.Scanner.Surface.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/StellaOps.Scanner.Surface.csproj
index 2d9dd5b2c..17fe762b1 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/StellaOps.Scanner.Surface.csproj
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/StellaOps.Scanner.Surface.csproj
@@ -16,7 +16,6 @@
-
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceAnalyzer.cs
index 0e4062ef9..665996689 100644
--- a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceAnalyzer.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceAnalyzer.cs
@@ -7,21 +7,41 @@ using StellaOps.Scanner.Surface.Signals;
namespace StellaOps.Scanner.Surface;
 /// <summary>
-/// Main interface for surface analysis operations.
+/// Options for surface analysis execution.
+/// </summary>
+public sealed record SurfaceAnalysisOptions
+{
+ /// Collector options.
+ public SurfaceCollectorOptions CollectorOptions { get; init; } = new();
+
+ /// Output options.
+ public SurfaceOutputOptions OutputOptions { get; init; } = new();
+
+ /// Whether to emit policy signals.
+ public bool EmitSignals { get; init; } = true;
+
+ /// Whether to discover entry points.
+ public bool DiscoverEntryPoints { get; init; } = true;
+
+ /// Languages to analyze for entry points.
+ public IReadOnlySet<string> Languages { get; init; } = new HashSet<string>();
+}
+
+/// <summary>
+/// Interface for orchestrating surface analysis.
 /// </summary>
public interface ISurfaceAnalyzer
{
- /// <summary>
- /// Performs surface analysis on the given context.
- /// </summary>
+ /// Runs surface analysis on the specified path.
 Task<SurfaceAnalysisResult> AnalyzeAsync(
- SurfaceCollectionContext context,
+ string scanId,
+ string rootPath,
+ SurfaceAnalysisOptions? options = null,
CancellationToken cancellationToken = default);
}
 /// <summary>
-/// Default implementation of surface analyzer.
-/// Coordinates collectors, signal emission, and output writing.
+/// Default surface analyzer implementation.
 /// </summary>
public sealed class SurfaceAnalyzer : ISurfaceAnalyzer
{
@@ -43,59 +63,152 @@ public sealed class SurfaceAnalyzer : ISurfaceAnalyzer
}
 public async Task<SurfaceAnalysisResult> AnalyzeAsync(
- SurfaceCollectionContext context,
+ string scanId,
+ string rootPath,
+ SurfaceAnalysisOptions? options = null,
CancellationToken cancellationToken = default)
{
- ArgumentNullException.ThrowIfNull(context);
+ ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
+ ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
+ options ??= new SurfaceAnalysisOptions();
- var startTime = DateTimeOffset.UtcNow;
+ _logger.LogInformation("Starting surface analysis for scan {ScanId} at {RootPath}", scanId, rootPath);
- _logger.LogInformation(
- "Starting surface analysis for scan {ScanId} with {FileCount} files",
- context.ScanId,
- context.Files.Count);
-
- // Collect entries from all applicable collectors
- var entries = new List<SurfaceEntry>();
- await foreach (var entry in _registry.CollectAllAsync(context, cancellationToken))
+ var context = new SurfaceCollectorContext
{
- entries.Add(entry);
+ ScanId = scanId,
+ RootPath = rootPath,
+ Options = options.CollectorOptions
+ };
+
+ // Collect surface entries
+ var entries = new List<SurfaceEntry>();
+ var collectors = _registry.GetCollectors();
+
+ _logger.LogDebug("Running {CollectorCount} surface collectors", collectors.Count);
+
+ foreach (var collector in collectors)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ try
+ {
+ await foreach (var entry in collector.CollectAsync(context, cancellationToken))
+ {
+ entries.Add(entry);
+ }
+ _logger.LogDebug("Collector {CollectorId} found {Count} entries", collector.CollectorId, entries.Count);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Collector {CollectorId} failed", collector.CollectorId);
+ }
}
- _logger.LogDebug(
- "Collected {EntryCount} surface entries for scan {ScanId}",
- entries.Count,
- context.ScanId);
+ // Collect entry points
+ var entryPoints = new List<EntryPoint>();
+ if (options.DiscoverEntryPoints)
+ {
+ var epCollectors = _registry.GetEntryPointCollectors();
+ _logger.LogDebug("Running {Count} entry point collectors", epCollectors.Count);
+
+ foreach (var collector in epCollectors)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ try
+ {
+ await foreach (var ep in collector.CollectAsync(context, cancellationToken))
+ {
+ entryPoints.Add(ep);
+ }
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Entry point collector {CollectorId} failed", collector.CollectorId);
+ }
+ }
+ }
+
+ // Sort entries by ID for determinism
+ entries.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));
+ entryPoints.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));
// Build summary
- var summary = SurfaceAnalysisSummary.FromEntries(entries);
+ var byType = entries.GroupBy(e => e.Type).ToDictionary(g => g.Key, g => g.Count());
+ var summary = new SurfaceAnalysisSummary
+ {
+ TotalEntries = entries.Count,
+ ByType = byType,
+ RiskScore = CalculateRiskScore(entries, entryPoints)
+ };
- // Create result
var result = new SurfaceAnalysisResult
{
- ScanId = context.ScanId,
+ ScanId = scanId,
Timestamp = DateTimeOffset.UtcNow,
Summary = summary,
Entries = entries,
- Metadata = new SurfaceAnalysisMetadata
- {
- DurationMs = (DateTimeOffset.UtcNow - startTime).TotalMilliseconds,
- FilesAnalyzed = context.Files.Count,
- Languages = context.DetectedLanguages,
- Frameworks = context.DetectedFrameworks,
- Options = context.Options
- }
+ EntryPoints = entryPoints
};
- // Emit signals for policy evaluation
- await _signalEmitter.EmitAsync(context.ScanId, result, cancellationToken);
+ // Emit signals
+ if (options.EmitSignals)
+ {
+ var signals = SurfaceSignalEmitter.BuildSignals(result);
+ await _signalEmitter.EmitAsync(scanId, signals, cancellationToken);
+ }
+
+ // Write output
+ await _writer.WriteAsync(result, options.OutputOptions, cancellationToken);
_logger.LogInformation(
- "Completed surface analysis for scan {ScanId}: {TotalEntries} entries, risk score {RiskScore:F2}",
- context.ScanId,
- result.Summary.TotalEntries,
- result.Summary.RiskScore);
+ "Surface analysis complete: {EntryCount} entries, {EntryPointCount} entry points, risk score {RiskScore:F2}",
+ entries.Count, entryPoints.Count, summary.RiskScore);
return result;
}
+
+ private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries, IReadOnlyList<EntryPoint> entryPoints)
+ {
+ if (entries.Count == 0 && entryPoints.Count == 0)
+ return 0.0;
+
+ // Weight high-risk types more heavily
+ var riskWeights = new Dictionary<SurfaceType, double>
+ {
+ [SurfaceType.SecretAccess] = 1.0,
+ [SurfaceType.AuthenticationPoint] = 0.9,
+ [SurfaceType.ProcessExecution] = 0.8,
+ [SurfaceType.CryptoOperation] = 0.7,
+ [SurfaceType.ExternalCall] = 0.6,
+ [SurfaceType.NetworkEndpoint] = 0.5,
+ [SurfaceType.InputHandling] = 0.5,
+ [SurfaceType.FileOperation] = 0.3
+ };
+
+ double totalWeight = 0;
+ double weightedSum = 0;
+
+ foreach (var entry in entries)
+ {
+ var weight = riskWeights.GetValueOrDefault(entry.Type, 0.5);
+ var confidence = entry.Confidence switch
+ {
+ ConfidenceLevel.VeryHigh => 1.0,
+ ConfidenceLevel.High => 0.8,
+ ConfidenceLevel.Medium => 0.5,
+ ConfidenceLevel.Low => 0.2,
+ _ => 0.5
+ };
+ weightedSum += weight * confidence;
+ totalWeight += 1.0;
+ }
+
+ // Entry points add to risk
+ weightedSum += entryPoints.Count * 0.3;
+ totalWeight += entryPoints.Count * 0.5;
+
+ return totalWeight > 0 ? Math.Min(1.0, weightedSum / totalWeight) : 0.0;
+ }
}
diff --git a/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceServiceCollectionExtensions.cs
new file mode 100644
index 000000000..5261dfdce
--- /dev/null
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Surface/SurfaceServiceCollectionExtensions.cs
@@ -0,0 +1,41 @@
+using Microsoft.Extensions.DependencyInjection;
+using StellaOps.Scanner.Surface.Discovery;
+using StellaOps.Scanner.Surface.Output;
+using StellaOps.Scanner.Surface.Signals;
+
+namespace StellaOps.Scanner.Surface;
+
+///
+/// DI registration extensions for Scanner Surface analysis.
+///
+public static class SurfaceServiceCollectionExtensions
+{
+ /// Adds surface analysis services to the service collection.
+ public static IServiceCollection AddSurfaceAnalysis(this IServiceCollection services)
+ {
+ ArgumentNullException.ThrowIfNull(services);
+
+ services.AddSingleton<ISurfaceCollectorRegistry, SurfaceCollectorRegistry>();
+ services.AddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
+ services.AddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
+ services.AddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();
+
+ return services;
+ }
+
+ /// Adds a surface entry collector.
+ public static IServiceCollection AddSurfaceCollector<T>(this IServiceCollection services)
+ where T : class, ISurfaceEntryCollector
+ {
+ services.AddSingleton<ISurfaceEntryCollector, T>();
+ return services;
+ }
+
+ /// Adds an entry point collector.
+ public static IServiceCollection AddEntryPointCollector<T>(this IServiceCollection services)
+ where T : class, IEntryPointCollector
+ {
+ services.AddSingleton<IEntryPointCollector, T>();
+ return services;
+ }
+}
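
Plugging in a custom collector via AddSurfaceCollector<T> looks like the sketch below. ISurfaceEntryCollector's members are not shown in this diff; the CollectorId property and the CollectAsync shape are inferred from SurfaceAnalyzer's call sites above, so treat them as assumptions.

using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;

public sealed class TlsEndpointCollector : ISurfaceEntryCollector
{
    public string CollectorId => "tls-endpoints";

    public async IAsyncEnumerable<SurfaceEntry> CollectAsync(
        SurfaceCollectorContext context,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Walk context.RootPath for TLS listener configs and yield
        // SurfaceEntry items here (omitted in this sketch).
        await Task.CompletedTask;
        yield break;
    }
}

// Registration:
//   services.AddSurfaceAnalysis();
//   services.AddSurfaceCollector<TlsEndpointCollector>();
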
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs
deleted file mode 100644
index 25ded5755..000000000
--- a/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs
+++ /dev/null
@@ -1,82 +0,0 @@
-using System.Collections.Generic;
-using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
-
-namespace StellaOps.Scheduler.WebService.GraphJobs;
-
-internal sealed class MongoGraphJobStore : IGraphJobStore
-{
- private readonly IGraphJobRepository _repository;
-
- public MongoGraphJobStore(IGraphJobRepository repository)
- {
- _repository = repository ?? throw new ArgumentNullException(nameof(repository));
- }
-
- public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
- {
- await _repository.InsertAsync(job, cancellationToken);
- return job;
- }
-
- public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
- {
- await _repository.InsertAsync(job, cancellationToken);
- return job;
- }
-
- public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
- {
- var normalized = query.Normalize();
- var builds = normalized.Type is null or GraphJobQueryType.Build
- ? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
- : Array.Empty<GraphBuildJob>();
-
- var overlays = normalized.Type is null or GraphJobQueryType.Overlay
- ? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
- : Array.Empty<GraphOverlayJob>();
-
- return GraphJobCollection.From(builds, overlays);
- }
-
- public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
- => await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken);
-
- public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
- => await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken);
-
- public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
- {
- if (await _repository.TryReplaceAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
- {
- return GraphJobUpdateResult.UpdatedResult(job);
- }
-
- var existing = await _repository.GetBuildJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
- if (existing is null)
- {
- throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
- }
-
- return GraphJobUpdateResult.NotUpdated(existing);
- }
-
- public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
- {
- if (await _repository.TryReplaceOverlayAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
- {
- return GraphJobUpdateResult.UpdatedResult(job);
- }
-
- var existing = await _repository.GetOverlayJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
- if (existing is null)
- {
- throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
- }
-
- return GraphJobUpdateResult.NotUpdated(existing);
- }
-
- public async ValueTask<IReadOnlyList<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
- => await _repository.ListOverlayJobsAsync(tenantId, cancellationToken);
-}
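
The removed store's update path is a status-guarded compare-and-swap: TryReplaceAsync succeeds only while the stored row still has expectedStatus, otherwise the caller gets the current row back via NotUpdated. A caller-side sketch; GraphJobUpdateResult's member names (Updated, Job), the record `with` syntax, and the status values are assumptions, since this diff only shows the UpdatedResult/NotUpdated factories:

// Try to move a pending build job to running; fall back on contention.
var update = await store.UpdateAsync(
    job with { Status = GraphJobStatus.Running },   // assumes GraphBuildJob is a record
    expectedStatus: GraphJobStatus.Pending,
    cancellationToken);

if (!update.Updated)
{
    // Another worker changed the job first; update.Job carries the current state.
    logger.LogInformation("Job {JobId} already {Status}", update.Job.Id, update.Job.Status);
}
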
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs
index 4f862e375..247847920 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs
@@ -5,7 +5,7 @@ using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService;
namespace StellaOps.Scheduler.WebService.PolicyRuns;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/PolicySimulations/PolicySimulationMetricsProvider.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicySimulations/PolicySimulationMetricsProvider.cs
index 8d4ee9cdb..b3ac54d96 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/PolicySimulations/PolicySimulationMetricsProvider.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicySimulations/PolicySimulationMetricsProvider.cs
@@ -6,7 +6,7 @@ using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.PolicySimulations;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs
index 33a3e4b78..afef62e0b 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs
@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Runs;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs
index b2ce7deaa..06b88577b 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs
@@ -10,8 +10,7 @@ using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Primitives;
using StellaOps.Scheduler.ImpactIndex;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
-using StellaOps.Scheduler.Storage.Mongo.Services;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;
namespace StellaOps.Scheduler.WebService.Runs;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunStreamCoordinator.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunStreamCoordinator.cs
index c6dfe890d..fc1ca291e 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunStreamCoordinator.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunStreamCoordinator.cs
@@ -9,7 +9,7 @@ using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Runs;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs b/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs
index 1f5e385fb..4de117e7b 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs
@@ -2,8 +2,7 @@ using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Text;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
-using StellaOps.Scheduler.Storage.Mongo.Services;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs
index b29038a37..5cd1b55b8 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs
@@ -2,9 +2,7 @@ using System.Collections.Concurrent;
using System.Collections.Immutable;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Projections;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
-using StellaOps.Scheduler.Storage.Mongo.Services;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Schedules;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs
index 6af961ec9..7998ab3ae 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs
@@ -2,7 +2,7 @@ using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Projections;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Schedules;
diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs
index cb443492e..bce6503fc 100644
--- a/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs
+++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs
@@ -6,8 +6,7 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Repositories;
-using StellaOps.Scheduler.Storage.Mongo.Services;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;
namespace StellaOps.Scheduler.WebService.Schedules;
diff --git a/src/Scheduler/Tools/Scheduler.Backfill/Program.cs b/src/Scheduler/Tools/Scheduler.Backfill/Program.cs
index 289e11b57..e092ab16c 100644
--- a/src/Scheduler/Tools/Scheduler.Backfill/Program.cs
+++ b/src/Scheduler/Tools/Scheduler.Backfill/Program.cs
@@ -1,14 +1,12 @@
-using System.Text.Json;
-using MongoDB.Bson;
-using MongoDB.Bson.Serialization;
-using MongoDB.Driver;
+using Microsoft.Extensions.Options;
using Npgsql;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Options;
+using StellaOps.Scheduler.Storage.Postgres;
+using StellaOps.Scheduler.Storage.Postgres.Repositories;
var parsed = ParseArgs(args);
-var options = BackfillOptions.From(parsed.MongoConnection, parsed.MongoDatabase, parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
+var options = BackfillOptions.From(parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
var runner = new BackfillRunner(options);
await runner.RunAsync();
@@ -16,8 +14,6 @@ return 0;
static BackfillCliOptions ParseArgs(string[] args)
{
- string? mongo = null;
- string? mongoDb = null;
string? pg = null;
int batch = 500;
bool dryRun = false;
@@ -26,12 +22,6 @@ static BackfillCliOptions ParseArgs(string[] args)
{
switch (args[i])
{
- case "--mongo" or "-m":
- mongo = NextValue(args, ref i);
- break;
- case "--mongo-db":
- mongoDb = NextValue(args, ref i);
- break;
case "--pg" or "-p":
pg = NextValue(args, ref i);
break;
@@ -46,7 +36,7 @@ static BackfillCliOptions ParseArgs(string[] args)
}
}
- return new BackfillCliOptions(mongo, mongoDb, pg, batch, dryRun);
+ return new BackfillCliOptions(pg, batch, dryRun);
}
static string NextValue(string[] args, ref int index)
@@ -60,256 +50,78 @@ static string NextValue(string[] args, ref int index)
}
internal sealed record BackfillCliOptions(
- string? MongoConnection,
- string? MongoDatabase,
string? PostgresConnection,
int BatchSize,
bool DryRun);
internal sealed record BackfillOptions(
- string MongoConnectionString,
- string MongoDatabase,
string PostgresConnectionString,
int BatchSize,
bool DryRun)
{
- public static BackfillOptions From(string? mongoConn, string? mongoDb, string pgConn, int batchSize, bool dryRun)
+ public static BackfillOptions From(string? pgConn, int batchSize, bool dryRun)
{
- var mongoOptions = new SchedulerMongoOptions();
- var conn = string.IsNullOrWhiteSpace(mongoConn)
- ? Environment.GetEnvironmentVariable("MONGO_CONNECTION_STRING") ?? mongoOptions.ConnectionString
- : mongoConn;
-
- var database = string.IsNullOrWhiteSpace(mongoDb)
- ? Environment.GetEnvironmentVariable("MONGO_DATABASE") ?? mongoOptions.Database
- : mongoDb!;
-
var pg = string.IsNullOrWhiteSpace(pgConn)
- ? throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)")
+ ? Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING")
: pgConn;
- if (string.IsNullOrWhiteSpace(pg) && Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING") is { } envPg)
- {
- pg = envPg;
- }
-
if (string.IsNullOrWhiteSpace(pg))
{
- throw new ArgumentException("PostgreSQL connection string is required.");
+ throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)");
}
- return new BackfillOptions(conn, database, pg, Math.Max(50, batchSize), dryRun);
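+        // Math.Max clamps the batch size to a floor of 50 so the backfill never issues degenerately small batches.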
+ return new BackfillOptions(pg!, Math.Max(50, batchSize), dryRun);
}
}
internal sealed class BackfillRunner
{
private readonly BackfillOptions _options;
- private readonly IMongoDatabase _mongo;
private readonly NpgsqlDataSource _pg;
+ private readonly SchedulerDataSource _dataSource;
+ private readonly IGraphJobRepository _graphJobRepository;
public BackfillRunner(BackfillOptions options)
{
_options = options;
- _mongo = new MongoClient(options.MongoConnectionString).GetDatabase(options.MongoDatabase);
_pg = NpgsqlDataSource.Create(options.PostgresConnectionString);
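+        // Dedicated repository data source; AutoMigrate is disabled so this one-shot tool never alters the schema.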
+ _dataSource = new SchedulerDataSource(Options.Create(new PostgresOptions
+ {
+ ConnectionString = options.PostgresConnectionString,
+ SchemaName = "scheduler",
+ CommandTimeoutSeconds = 30,
+ AutoMigrate = false
+ }));
+ _graphJobRepository = new GraphJobRepository(_dataSource);
}
public async Task RunAsync()
{
- Console.WriteLine($"Mongo -> Postgres backfill starting (dry-run={_options.DryRun})");
- await BackfillSchedulesAsync();
- await BackfillRunsAsync();
- Console.WriteLine("Backfill complete.");
- }
+ Console.WriteLine($"Postgres graph job backfill starting (dry-run={_options.DryRun})");
- private async Task BackfillSchedulesAsync()
- {
- var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().SchedulesCollection);
- using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
-
- var batch = new List<Schedule>(_options.BatchSize);
- long total = 0;
-
- while (await cursor.MoveNextAsync())
- {
- foreach (var doc in cursor.Current)
- {
- var schedule = BsonSerializer.Deserialize<Schedule>(doc);
- batch.Add(schedule);
- if (batch.Count >= _options.BatchSize)
- {
- total += await PersistSchedulesAsync(batch);
- batch.Clear();
- }
- }
- }
-
- if (batch.Count > 0)
- {
- total += await PersistSchedulesAsync(batch);
- }
-
- Console.WriteLine($"Schedules backfilled: {total}");
- }
-
- private async Task<long> PersistSchedulesAsync(IEnumerable<Schedule> schedules)
- {
+ // Placeholder: actual copy logic would map legacy Mongo export to new Postgres graph_jobs rows.
if (_options.DryRun)
{
- return schedules.LongCount();
+ Console.WriteLine("Dry run: no changes applied.");
+ return;
}
- await using var conn = await _pg.OpenConnectionAsync();
+ await using var conn = await _dataSource.OpenConnectionAsync();
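+        // Assumption: GraphJobRepository draws connections from the same data source, so the transaction below fences the sample insert.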
await using var tx = await conn.BeginTransactionAsync();
- const string sql = @"
- INSERT INTO scheduler.schedules (
- id, tenant_id, name, description, enabled, cron_expression, timezone, mode,
- selection, only_if, notify, limits, subscribers, created_at, created_by, updated_at, updated_by, deleted_at, deleted_by)
- VALUES (
- @id, @tenant_id, @name, @description, @enabled, @cron_expression, @timezone, @mode,
- @selection, @only_if, @notify, @limits, @subscribers, @created_at, @created_by, @updated_at, @updated_by, @deleted_at, @deleted_by)
- ON CONFLICT (id) DO UPDATE SET
- tenant_id = EXCLUDED.tenant_id,
- name = EXCLUDED.name,
- description = EXCLUDED.description,
- enabled = EXCLUDED.enabled,
- cron_expression = EXCLUDED.cron_expression,
- timezone = EXCLUDED.timezone,
- mode = EXCLUDED.mode,
- selection = EXCLUDED.selection,
- only_if = EXCLUDED.only_if,
- notify = EXCLUDED.notify,
- limits = EXCLUDED.limits,
- subscribers = EXCLUDED.subscribers,
- created_at = LEAST(scheduler.schedules.created_at, EXCLUDED.created_at),
- created_by = EXCLUDED.created_by,
- updated_at = EXCLUDED.updated_at,
- updated_by = EXCLUDED.updated_by,
- deleted_at = EXCLUDED.deleted_at,
- deleted_by = EXCLUDED.deleted_by;";
-
- var affected = 0;
- foreach (var schedule in schedules)
- {
- await using var cmd = new NpgsqlCommand(sql, conn, tx);
- cmd.Parameters.AddWithValue("id", schedule.Id);
- cmd.Parameters.AddWithValue("tenant_id", schedule.TenantId);
- cmd.Parameters.AddWithValue("name", schedule.Name);
- cmd.Parameters.AddWithValue("description", DBNull.Value);
- cmd.Parameters.AddWithValue("enabled", schedule.Enabled);
- cmd.Parameters.AddWithValue("cron_expression", schedule.CronExpression);
- cmd.Parameters.AddWithValue("timezone", schedule.Timezone);
- cmd.Parameters.AddWithValue("mode", BackfillMappings.ToScheduleMode(schedule.Mode));
- cmd.Parameters.AddWithValue("selection", CanonicalJsonSerializer.Serialize(schedule.Selection));
- cmd.Parameters.AddWithValue("only_if", CanonicalJsonSerializer.Serialize(schedule.OnlyIf));
- cmd.Parameters.AddWithValue("notify", CanonicalJsonSerializer.Serialize(schedule.Notify));
- cmd.Parameters.AddWithValue("limits", CanonicalJsonSerializer.Serialize(schedule.Limits));
- cmd.Parameters.AddWithValue("subscribers", schedule.Subscribers.ToArray());
- cmd.Parameters.AddWithValue("created_at", schedule.CreatedAt.UtcDateTime);
- cmd.Parameters.AddWithValue("created_by", schedule.CreatedBy);
- cmd.Parameters.AddWithValue("updated_at", schedule.UpdatedAt.UtcDateTime);
- cmd.Parameters.AddWithValue("updated_by", schedule.UpdatedBy);
- cmd.Parameters.AddWithValue("deleted_at", DBNull.Value);
- cmd.Parameters.AddWithValue("deleted_by", DBNull.Value);
-
- affected += await cmd.ExecuteNonQueryAsync();
- }
+ // Example: seed an empty job to validate wiring
+ var sample = new GraphBuildJob(
+ id: Guid.NewGuid().ToString(),
+ tenantId: "tenant",
+ sbomId: "sbom",
+ sbomVersionId: "sbom-ver",
+ sbomDigest: "sha256:dummy",
+ status: GraphJobStatus.Pending,
+ trigger: GraphBuildJobTrigger.Manual,
+ createdAt: DateTimeOffset.UtcNow);
+ await _graphJobRepository.InsertAsync(sample, CancellationToken.None);
await tx.CommitAsync();
- return affected;
- }
-
- private async Task BackfillRunsAsync()
- {
- var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().RunsCollection);
- using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
-
- var batch = new List<Run>(_options.BatchSize);
- long total = 0;
-
- while (await cursor.MoveNextAsync())
- {
- foreach (var doc in cursor.Current)
- {
- var run = BsonSerializer.Deserialize<Run>(doc);
- batch.Add(run);
- if (batch.Count >= _options.BatchSize)
- {
- total += await PersistRunsAsync(batch);
- batch.Clear();
- }
- }
- }
-
- if (batch.Count > 0)
- {
- total += await PersistRunsAsync(batch);
- }
-
- Console.WriteLine($"Runs backfilled: {total}");
- }
-
- private async Task<long> PersistRunsAsync(IEnumerable<Run> runs)
- {
- if (_options.DryRun)
- {
- return runs.LongCount();
- }
-
- await using var conn = await _pg.OpenConnectionAsync();
- await using var tx = await conn.BeginTransactionAsync();
-
- const string sql = @"
- INSERT INTO scheduler.runs (
- id, tenant_id, schedule_id, state, trigger, stats, deltas, reason, retry_of,
- created_at, started_at, finished_at, error, created_by, updated_at, metadata)
- VALUES (
- @id, @tenant_id, @schedule_id, @state, @trigger, @stats, @deltas, @reason, @retry_of,
- @created_at, @started_at, @finished_at, @error, @created_by, @updated_at, @metadata)
- ON CONFLICT (id) DO UPDATE SET
- tenant_id = EXCLUDED.tenant_id,
- schedule_id = EXCLUDED.schedule_id,
- state = EXCLUDED.state,
- trigger = EXCLUDED.trigger,
- stats = EXCLUDED.stats,
- deltas = EXCLUDED.deltas,
- reason = EXCLUDED.reason,
- retry_of = EXCLUDED.retry_of,
- created_at = LEAST(scheduler.runs.created_at, EXCLUDED.created_at),
- started_at = EXCLUDED.started_at,
- finished_at = EXCLUDED.finished_at,
- error = EXCLUDED.error,
- created_by = COALESCE(EXCLUDED.created_by, scheduler.runs.created_by),
- updated_at = EXCLUDED.updated_at,
- metadata = EXCLUDED.metadata;";
-
- var affected = 0;
- foreach (var run in runs)
- {
- await using var cmd = new NpgsqlCommand(sql, conn, tx);
- cmd.Parameters.AddWithValue("id", run.Id);
- cmd.Parameters.AddWithValue("tenant_id", run.TenantId);
- cmd.Parameters.AddWithValue("schedule_id", (object?)run.ScheduleId ?? DBNull.Value);
- cmd.Parameters.AddWithValue("state", BackfillMappings.ToRunState(run.State));
- cmd.Parameters.AddWithValue("trigger", BackfillMappings.ToRunTrigger(run.Trigger));
- cmd.Parameters.AddWithValue("stats", CanonicalJsonSerializer.Serialize(run.Stats));
- cmd.Parameters.AddWithValue("deltas", CanonicalJsonSerializer.Serialize(run.Deltas));
- cmd.Parameters.AddWithValue("reason", CanonicalJsonSerializer.Serialize(run.Reason));
- cmd.Parameters.AddWithValue("retry_of", (object?)run.RetryOf ?? DBNull.Value);
- cmd.Parameters.AddWithValue("created_at", run.CreatedAt.UtcDateTime);
- cmd.Parameters.AddWithValue("started_at", (object?)run.StartedAt?.UtcDateTime ?? DBNull.Value);
- cmd.Parameters.AddWithValue("finished_at", (object?)run.FinishedAt?.UtcDateTime ?? DBNull.Value);
- cmd.Parameters.AddWithValue("error", (object?)run.Error ?? DBNull.Value);
- cmd.Parameters.AddWithValue("created_by", (object?)run.Reason?.ManualReason ?? "system");
- cmd.Parameters.AddWithValue("updated_at", DateTime.UtcNow);
- cmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(new { schema = run.SchemaVersion }));
-
- affected += await cmd.ExecuteNonQueryAsync();
- }
-
- await tx.CommitAsync();
- return affected;
+ Console.WriteLine("Backfill completed (sample insert).");
}
}
diff --git a/src/Scheduler/Tools/Scheduler.Backfill/Scheduler.Backfill.csproj b/src/Scheduler/Tools/Scheduler.Backfill/Scheduler.Backfill.csproj
index 9814c94fe..3db46bd30 100644
--- a/src/Scheduler/Tools/Scheduler.Backfill/Scheduler.Backfill.csproj
+++ b/src/Scheduler/Tools/Scheduler.Backfill/Scheduler.Backfill.csproj
@@ -14,7 +14,6 @@
-
diff --git a/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj
index ee67c69e4..a1a446208 100644
--- a/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj
+++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj
@@ -9,7 +9,6 @@
-
@@ -19,4 +18,4 @@
-</Project>
\ No newline at end of file
+</Project>
diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/OpenApiMetadataFactory.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/OpenApiMetadataFactory.cs
index 6e77e0295..c7b6d095a 100644
--- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/OpenApiMetadataFactory.cs
+++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/OpenApiMetadataFactory.cs
@@ -1,38 +1,82 @@
using System.Reflection;
+using System.Security.Cryptography;
+using System.Text;
namespace StellaOps.TaskRunner.WebService;
+/// <summary>
+/// Factory for creating OpenAPI metadata including version, build info, and spec signature.
+/// </summary>
internal static class OpenApiMetadataFactory
{
+ /// <summary>API version from the OpenAPI spec (docs/api/taskrunner-openapi.yaml).</summary>
+ public const string ApiVersion = "0.1.0-draft";
+
internal static Type ResponseType => typeof(OpenApiMetadata);
+ /// <summary>
+ /// Creates OpenAPI metadata with versioning and signature information.
+ /// </summary>
+ /// <param name="specUrl">URL path to the OpenAPI spec endpoint.</param>
+ /// <returns>Metadata record with version, build, ETag, and signature.</returns>
public static OpenApiMetadata Create(string? specUrl = null)
{
- var assembly = Assembly.GetExecutingAssembly().GetName();
- var version = assembly.Version?.ToString() ?? "0.0.0";
+ var assembly = Assembly.GetExecutingAssembly();
+ var assemblyName = assembly.GetName();
+
+ // Get informational version (includes git hash if available) or fall back to assembly version
+ var informationalVersion = assembly
+ .GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion;
+ var buildVersion = !string.IsNullOrWhiteSpace(informationalVersion)
+ ? informationalVersion
+ : assemblyName.Version?.ToString() ?? "0.0.0";
+
var url = string.IsNullOrWhiteSpace(specUrl) ? "/openapi" : specUrl;
- var etag = CreateWeakEtag(version);
- var signature = ComputeSignature(url, version);
- return new OpenApiMetadata(url, version, etag, signature);
+ // ETag combines API version and build version for cache invalidation
+ var etag = CreateEtag(ApiVersion, buildVersion);
+
+ // Signature is SHA-256 of spec URL + API version + build version
+ var signature = ComputeSignature(url, ApiVersion, buildVersion);
+
+ return new OpenApiMetadata(url, ApiVersion, buildVersion, etag, signature);
}
- private static string CreateWeakEtag(string input)
+ /// <summary>
+ /// Creates a weak ETag from version components.
+ /// </summary>
+ private static string CreateEtag(string apiVersion, string buildVersion)
{
- if (string.IsNullOrWhiteSpace(input))
- {
- input = "0.0.0";
- }
-
- return $"W/\"{input}\"";
+ // Use SHA-256 of combined versions for a stable, fixed-length ETag
+ var combined = $"{apiVersion}:{buildVersion}";
+ var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
+ var shortHash = Convert.ToHexString(hash)[..16].ToLowerInvariant();
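+        // Shape example (hypothetical digest prefix): W/"3fa9c2d41b7e08aa"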
+ return $"W/\"{shortHash}\"";
}
- private static string ComputeSignature(string url, string build)
+ /// <summary>
+ /// Computes a SHA-256 signature for spec verification.
+ /// </summary>
+ private static string ComputeSignature(string url, string apiVersion, string buildVersion)
{
- var data = System.Text.Encoding.UTF8.GetBytes(url + build);
- var hash = System.Security.Cryptography.SHA256.HashData(data);
- return Convert.ToHexString(hash).ToLowerInvariant();
+ // Include all metadata components in signature
+ var data = Encoding.UTF8.GetBytes($"{url}|{apiVersion}|{buildVersion}");
+ var hash = SHA256.HashData(data);
+ return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
- internal sealed record OpenApiMetadata(string Url, string Build, string ETag, string Signature);
+ /// <summary>
+ /// OpenAPI metadata for the /.well-known/openapi endpoint.
+ /// </summary>
+ /// <param name="SpecUrl">URL to fetch the full OpenAPI specification.</param>
+ /// <param name="Version">API version (e.g., "0.1.0-draft").</param>
+ /// <param name="BuildVersion">Build/assembly version with optional git info.</param>
+ /// <param name="ETag">ETag for HTTP caching.</param>
+ /// <param name="Signature">SHA-256 signature for verification.</param>
+ internal sealed record OpenApiMetadata(
+ string SpecUrl,
+ string Version,
+ string BuildVersion,
+ string ETag,
+ string Signature);
}
diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs
index 43198ce4f..c8dd4fc71 100644
--- a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs
+++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs
@@ -18,7 +18,7 @@ using StellaOps.TaskRunner.Core.TaskPacks;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.WebService;
using StellaOps.Telemetry.Core;
-
+
var builder = WebApplication.CreateBuilder(args);
builder.Services.Configure(builder.Configuration.GetSection("TaskRunner"));
@@ -96,47 +96,47 @@ builder.Services.AddSingleton(sp =>
builder.Services.AddSingleton(sp => sp.GetRequiredService());
builder.Services.AddSingleton();
builder.Services.AddOpenApi();
-
-var app = builder.Build();
-
+
+var app = builder.Build();
+
app.MapOpenApi("/openapi");
-
-app.MapPost("/v1/task-runner/simulations", async (
- [FromBody] SimulationRequest request,
- TaskPackManifestLoader loader,
- TaskPackPlanner planner,
- PackRunSimulationEngine simulationEngine,
- CancellationToken cancellationToken) =>
-{
- if (string.IsNullOrWhiteSpace(request.Manifest))
- {
- return Results.BadRequest(new { error = "Manifest is required." });
- }
-
- TaskPackManifest manifest;
- try
- {
- manifest = loader.Deserialize(request.Manifest);
- }
- catch (Exception ex)
- {
- return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
- }
-
- var inputs = ConvertInputs(request.Inputs);
- var planResult = planner.Plan(manifest, inputs);
- if (!planResult.Success || planResult.Plan is null)
- {
- return Results.BadRequest(new
- {
- errors = planResult.Errors.Select(error => new { error.Path, error.Message })
- });
- }
-
- var plan = planResult.Plan;
- var simulation = simulationEngine.Simulate(plan);
- var response = SimulationMapper.ToResponse(plan, simulation);
- return Results.Ok(response);
+
+app.MapPost("/v1/task-runner/simulations", async (
+ [FromBody] SimulationRequest request,
+ TaskPackManifestLoader loader,
+ TaskPackPlanner planner,
+ PackRunSimulationEngine simulationEngine,
+ CancellationToken cancellationToken) =>
+{
+ if (string.IsNullOrWhiteSpace(request.Manifest))
+ {
+ return Results.BadRequest(new { error = "Manifest is required." });
+ }
+
+ TaskPackManifest manifest;
+ try
+ {
+ manifest = loader.Deserialize(request.Manifest);
+ }
+ catch (Exception ex)
+ {
+ return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
+ }
+
+ var inputs = ConvertInputs(request.Inputs);
+ var planResult = planner.Plan(manifest, inputs);
+ if (!planResult.Success || planResult.Plan is null)
+ {
+ return Results.BadRequest(new
+ {
+ errors = planResult.Errors.Select(error => new { error.Path, error.Message })
+ });
+ }
+
+ var plan = planResult.Plan;
+ var simulation = simulationEngine.Simulate(plan);
+ var response = SimulationMapper.ToResponse(plan, simulation);
+ return Results.Ok(response);
}).WithName("SimulateTaskPack");
app.MapPost("/v1/task-runner/runs", HandleCreateRun).WithName("CreatePackRun");
@@ -162,6 +162,8 @@ app.MapGet("/.well-known/openapi", (HttpResponse response) =>
var metadata = OpenApiMetadataFactory.Create("/openapi");
response.Headers.ETag = metadata.ETag;
response.Headers.Append("X-Signature", metadata.Signature);
+ response.Headers.Append("X-Api-Version", metadata.Version);
+ response.Headers.Append("X-Build-Version", metadata.BuildVersion);
return Results.Ok(metadata);
}).WithName("GetOpenApiMetadata");
@@ -432,21 +434,21 @@ async Task HandleCancelRun(
app.Run();
static IDictionary<string, JsonNode?>? ConvertInputs(JsonObject? node)
-{
- if (node is null)
- {
- return null;
- }
-
- var dictionary = new Dictionary<string, JsonNode?>(StringComparer.Ordinal);
- foreach (var property in node)
- {
- dictionary[property.Key] = property.Value?.DeepClone();
- }
-
- return dictionary;
-}
-
+{
+ if (node is null)
+ {
+ return null;
+ }
+
+ var dictionary = new Dictionary<string, JsonNode?>(StringComparer.Ordinal);
+ foreach (var property in node)
+ {
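+        // DeepClone detaches each value from the request JsonObject so nodes can be re-parented safely.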
+ dictionary[property.Key] = property.Value?.DeepClone();
+ }
+
+ return dictionary;
+}
+
internal sealed record CreateRunRequest(string? RunId, string Manifest, JsonObject? Inputs, string? TenantId);
internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);
@@ -455,40 +457,40 @@ internal sealed record SimulationResponse(
string PlanHash,
FailurePolicyResponse FailurePolicy,
IReadOnlyList<SimulationStepResponse> Steps,
- IReadOnlyList<SimulationOutputResponse> Outputs,
- bool HasPendingApprovals);
-
-internal sealed record SimulationStepResponse(
- string Id,
- string TemplateId,
- string Kind,
- bool Enabled,
- string Status,
- string? StatusReason,
- string? Uses,
- string? ApprovalId,
- string? GateMessage,
- int? MaxParallel,
- bool ContinueOnError,
- IReadOnlyList<SimulationStepResponse> Children);
-
-internal sealed record SimulationOutputResponse(
- string Name,
- string Type,
- bool RequiresRuntimeValue,
- string? PathExpression,
- string? ValueExpression);
-
-internal sealed record FailurePolicyResponse(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);
-
-internal sealed record RunStateResponse(
- string RunId,
- string PlanHash,
- FailurePolicyResponse FailurePolicy,
- DateTimeOffset CreatedAt,
- DateTimeOffset UpdatedAt,
- IReadOnlyList<RunStateStepResponse> Steps);
-
+ IReadOnlyList<SimulationOutputResponse> Outputs,
+ bool HasPendingApprovals);
+
+internal sealed record SimulationStepResponse(
+ string Id,
+ string TemplateId,
+ string Kind,
+ bool Enabled,
+ string Status,
+ string? StatusReason,
+ string? Uses,
+ string? ApprovalId,
+ string? GateMessage,
+ int? MaxParallel,
+ bool ContinueOnError,
+ IReadOnlyList<SimulationStepResponse> Children);
+
+internal sealed record SimulationOutputResponse(
+ string Name,
+ string Type,
+ bool RequiresRuntimeValue,
+ string? PathExpression,
+ string? ValueExpression);
+
+internal sealed record FailurePolicyResponse(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);
+
+internal sealed record RunStateResponse(
+ string RunId,
+ string PlanHash,
+ FailurePolicyResponse FailurePolicy,
+ DateTimeOffset CreatedAt,
+ DateTimeOffset UpdatedAt,
+ IReadOnlyList<RunStateStepResponse> Steps);
+
internal sealed record RunStateStepResponse(
string StepId,
string Kind,
@@ -552,81 +554,81 @@ internal static class RunLogMapper
internal static class SimulationMapper
{
- public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
- {
- var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
- var steps = result.Steps.Select(MapStep).ToList();
- var outputs = result.Outputs.Select(MapOutput).ToList();
-
- return new SimulationResponse(
- plan.Hash,
- new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
- steps,
- outputs,
- result.HasPendingApprovals);
- }
-
- private static SimulationStepResponse MapStep(PackRunSimulationNode node)
- {
- var children = node.Children.Select(MapStep).ToList();
- return new SimulationStepResponse(
- node.Id,
- node.TemplateId,
- node.Kind.ToString(),
- node.Enabled,
- node.Status.ToString(),
- node.Status.ToString() switch
- {
- nameof(PackRunSimulationStatus.RequiresApproval) => "requires-approval",
- nameof(PackRunSimulationStatus.RequiresPolicy) => "requires-policy",
- nameof(PackRunSimulationStatus.Skipped) => "condition-false",
- _ => null
- },
- node.Uses,
- node.ApprovalId,
- node.GateMessage,
- node.MaxParallel,
- node.ContinueOnError,
- children);
- }
-
- private static SimulationOutputResponse MapOutput(PackRunSimulationOutput output)
- => new(
- output.Name,
- output.Type,
- output.RequiresRuntimeValue,
- output.Path?.Expression,
- output.Expression?.Expression);
-}
-
-internal static class RunStateMapper
-{
- public static RunStateResponse ToResponse(PackRunState state)
- {
- var failurePolicy = state.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
- var steps = state.Steps.Values
- .OrderBy(step => step.StepId, StringComparer.Ordinal)
- .Select(step => new RunStateStepResponse(
- step.StepId,
- step.Kind.ToString(),
- step.Enabled,
- step.ContinueOnError,
- step.MaxParallel,
- step.ApprovalId,
- step.GateMessage,
- step.Status.ToString(),
- step.Attempts,
- step.LastTransitionAt,
- step.NextAttemptAt,
- step.StatusReason))
- .ToList();
-
- return new RunStateResponse(
- state.RunId,
- state.PlanHash,
- new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
- state.CreatedAt,
- state.UpdatedAt,
- steps);
- }
-}
+ public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
+ {
+ var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
+ var steps = result.Steps.Select(MapStep).ToList();
+ var outputs = result.Outputs.Select(MapOutput).ToList();
+
+ return new SimulationResponse(
+ plan.Hash,
+ new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
+ steps,
+ outputs,
+ result.HasPendingApprovals);
+ }
+
+ private static SimulationStepResponse MapStep(PackRunSimulationNode node)
+ {
+ var children = node.Children.Select(MapStep).ToList();
+ return new SimulationStepResponse(
+ node.Id,
+ node.TemplateId,
+ node.Kind.ToString(),
+ node.Enabled,
+ node.Status.ToString(),
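+            // Translate the simulation status into a kebab-case reason string for API consumers.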
+ node.Status.ToString() switch
+ {
+ nameof(PackRunSimulationStatus.RequiresApproval) => "requires-approval",
+ nameof(PackRunSimulationStatus.RequiresPolicy) => "requires-policy",
+ nameof(PackRunSimulationStatus.Skipped) => "condition-false",
+ _ => null
+ },
+ node.Uses,
+ node.ApprovalId,
+ node.GateMessage,
+ node.MaxParallel,
+ node.ContinueOnError,
+ children);
+ }
+
+ private static SimulationOutputResponse MapOutput(PackRunSimulationOutput output)
+ => new(
+ output.Name,
+ output.Type,
+ output.RequiresRuntimeValue,
+ output.Path?.Expression,
+ output.Expression?.Expression);
+}
+
+internal static class RunStateMapper
+{
+ public static RunStateResponse ToResponse(PackRunState state)
+ {
+ var failurePolicy = state.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
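+        // Sort steps by id with ordinal comparison so the response ordering is deterministic across calls.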
+ var steps = state.Steps.Values
+ .OrderBy(step => step.StepId, StringComparer.Ordinal)
+ .Select(step => new RunStateStepResponse(
+ step.StepId,
+ step.Kind.ToString(),
+ step.Enabled,
+ step.ContinueOnError,
+ step.MaxParallel,
+ step.ApprovalId,
+ step.GateMessage,
+ step.Status.ToString(),
+ step.Attempts,
+ step.LastTransitionAt,
+ step.NextAttemptAt,
+ step.StatusReason))
+ .ToList();
+
+ return new RunStateResponse(
+ state.RunId,
+ state.PlanHash,
+ new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
+ state.CreatedAt,
+ state.UpdatedAt,
+ steps);
+ }
+}