feat(api): Add Policy Registry API specification

- Introduced OpenAPI specification for the StellaOps Policy Registry API, covering endpoints for verification policies, policy packs, snapshots, violations, overrides, sealed mode operations, and advisory staleness tracking.
- Defined schemas, parameters, and responses for comprehensive API documentation.

chore(scanner): Add global usings for scanner analyzers

- Created GlobalUsings.cs to simplify namespace usage across analyzer libraries.

feat(scanner): Implement Surface Service Collection Extensions

- Added SurfaceServiceCollectionExtensions for dependency injection registration of surface analysis services.
- Included methods for adding surface analysis, surface collectors, and entry point collectors to the service collection.
StellaOps Bot
2025-12-06 20:52:23 +02:00
parent 05597616d6
commit f6c22854a4
37 changed files with 5664 additions and 1263 deletions

View File

@@ -15,7 +15,10 @@
     "Bash(Select-Object -ExpandProperty FullName)",
     "Bash(echo:*)",
     "Bash(Out-File -FilePath \"E:\\dev\\git.stella-ops.org\\src\\Scanner\\__Libraries\\StellaOps.Scanner.Surface\\StellaOps.Scanner.Surface.csproj\" -Encoding utf8)",
-    "Bash(wc:*)"
+    "Bash(wc:*)",
+    "Bash(find:*)",
+    "WebFetch(domain:docs.gradle.org)",
+    "WebSearch"
   ],
   "deny": [],
   "ask": []

View File

@@ -23,8 +23,18 @@ jobs:
           cp deploy/downloads/manifest.json out/mock-release/
           tar -czf out/mock-release/mock-dev-release.tgz -C out/mock-release .
+      - name: Compose config (dev + mock overlay)
+        run: |
+          set -euo pipefail
+          cd deploy/compose
+          docker compose --env-file env/dev.env.example --env-file env/mock.env.example \
+            -f docker-compose.dev.yaml -f docker-compose.mock.yaml config > /tmp/compose-mock-config.yaml
+          ls -lh /tmp/compose-mock-config.yaml
       - name: Upload mock release bundle
         uses: actions/upload-artifact@v3
         with:
           name: mock-dev-release
-          path: out/mock-release/mock-dev-release.tgz
+          path: |
+            out/mock-release/mock-dev-release.tgz
+            /tmp/compose-mock-config.yaml

View File

@@ -0,0 +1,886 @@
# OpenAPI 3.1 specification for StellaOps TaskRunner WebService
openapi: 3.1.0
info:
title: StellaOps TaskRunner API
version: 0.1.0-draft
description: |
Contract for TaskRunner service covering pack runs, simulations, logs, artifacts, and approvals.
Uses the platform error envelope and tenant header `X-StellaOps-Tenant`.
## Streaming Endpoints
The `/runs/{runId}/logs` endpoint returns logs in NDJSON (Newline Delimited JSON) format
for efficient streaming. Each line is a complete JSON object.
## Control Flow Steps
TaskPacks support the following step kinds:
- **run**: Execute an action using a builtin or custom executor
- **parallel**: Execute child steps concurrently with optional maxParallel limit
- **map**: Iterate over items and execute a template step for each
- **loop**: Iterate with items expression, range, or static list
- **conditional**: Branch based on condition expressions
- **gate.approval**: Require manual approval before proceeding
- **gate.policy**: Evaluate policy and optionally require override approval
servers:
- url: https://taskrunner.stellaops.example.com
description: Production
- url: https://taskrunner.dev.stellaops.example.com
description: Development
security:
- oauth2: [taskrunner.viewer]
- oauth2: [taskrunner.operator]
- oauth2: [taskrunner.admin]
paths:
/v1/task-runner/simulations:
post:
summary: Simulate a task pack
description: |
Validates a task pack manifest, creates an execution plan, and simulates the run
without actually executing any steps. Returns the simulation result showing which
steps would execute, which are skipped, and which require approvals.
operationId: simulateTaskPack
tags: [Simulations]
parameters:
- $ref: '#/components/parameters/Tenant'
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/SimulationRequest'
examples:
basic-simulation:
summary: Basic simulation request
value:
manifest: |
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: scan-deploy
version: 1.0.0
spec:
inputs:
- name: target
type: string
required: true
sandbox:
mode: sealed
egressAllowlist: []
cpuLimitMillicores: 100
memoryLimitMiB: 128
quotaSeconds: 60
slo:
runP95Seconds: 300
approvalP95Seconds: 900
maxQueueDepth: 100
steps:
- id: scan
run:
uses: builtin:scanner
with:
target: "{{ inputs.target }}"
inputs:
target: "registry.example.com/app:v1.2.3"
responses:
'200':
description: Simulation completed
content:
application/json:
schema:
$ref: '#/components/schemas/SimulationResponse'
examples:
simulation-result:
value:
planHash: "sha256:a1b2c3d4e5f6..."
failurePolicy:
maxAttempts: 1
backoffSeconds: 0
continueOnError: false
steps:
- id: scan
templateId: scan
kind: Run
enabled: true
status: Pending
uses: "builtin:scanner"
children: []
outputs: []
hasPendingApprovals: false
'400':
description: Invalid manifest or inputs
content:
application/json:
schema:
$ref: '#/components/schemas/PlanErrorResponse'
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs:
post:
summary: Create a pack run
description: |
Creates a new pack run from a task pack manifest. The run is scheduled for execution
and will proceed through its steps. If approval gates are present, the run will pause
at those gates until approvals are granted.
operationId: createPackRun
tags: [Runs]
parameters:
- $ref: '#/components/parameters/Tenant'
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/CreateRunRequest'
examples:
create-run:
summary: Create a new run
value:
runId: "run-20251206-001"
manifest: |
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: deploy-app
version: 2.0.0
spec:
sandbox:
mode: sealed
egressAllowlist: []
cpuLimitMillicores: 200
memoryLimitMiB: 256
quotaSeconds: 120
slo:
runP95Seconds: 600
approvalP95Seconds: 1800
maxQueueDepth: 50
approvals:
- id: security-review
grants: [packs.approve]
steps:
- id: build
run:
uses: builtin:build
- id: approval
gate:
approval:
id: security-review
message: "Security review required before deploy"
- id: deploy
run:
uses: builtin:deploy
tenantId: "tenant-prod"
responses:
'201':
description: Run created
headers:
Location:
description: URL of the created run
schema:
type: string
content:
application/json:
schema:
$ref: '#/components/schemas/RunStateResponse'
'400':
description: Invalid manifest or inputs
content:
application/json:
schema:
$ref: '#/components/schemas/PlanErrorResponse'
'409':
description: Run ID already exists
content:
application/json:
schema:
$ref: '#/components/schemas/ErrorEnvelope'
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs/{runId}:
get:
summary: Get run state
description: |
Returns the current state of a pack run, including status of all steps,
failure policy, and timing information.
operationId: getRunState
tags: [Runs]
parameters:
- $ref: '#/components/parameters/Tenant'
- $ref: '#/components/parameters/RunId'
responses:
'200':
description: Run state
content:
application/json:
schema:
$ref: '#/components/schemas/RunStateResponse'
examples:
running:
summary: Run in progress
value:
runId: "run-20251206-001"
planHash: "sha256:a1b2c3d4..."
failurePolicy:
maxAttempts: 2
backoffSeconds: 30
continueOnError: false
createdAt: "2025-12-06T10:00:00Z"
updatedAt: "2025-12-06T10:05:00Z"
steps:
- stepId: build
kind: Run
enabled: true
continueOnError: false
status: Succeeded
attempts: 1
lastTransitionAt: "2025-12-06T10:02:00Z"
- stepId: approval
kind: GateApproval
enabled: true
continueOnError: false
approvalId: security-review
gateMessage: "Security review required before deploy"
status: Pending
attempts: 0
statusReason: "awaiting-approval"
- stepId: deploy
kind: Run
enabled: true
continueOnError: false
status: Pending
attempts: 0
'404':
description: Run not found
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs/{runId}/logs:
get:
summary: Stream run logs
description: |
Returns run logs as a stream of NDJSON (Newline Delimited JSON) entries.
Each line is a complete JSON object representing a log entry with timestamp,
level, event type, message, and optional metadata.
**Content-Type**: `application/x-ndjson`
operationId: streamRunLogs
tags: [Logs]
parameters:
- $ref: '#/components/parameters/Tenant'
- $ref: '#/components/parameters/RunId'
responses:
'200':
description: Log stream
content:
application/x-ndjson:
schema:
$ref: '#/components/schemas/RunLogEntry'
examples:
log-stream:
summary: Sample NDJSON log stream
value: |
{"timestamp":"2025-12-06T10:00:00Z","level":"info","eventType":"run.created","message":"Run created via API.","metadata":{"planHash":"sha256:a1b2c3d4...","requestedAt":"2025-12-06T10:00:00Z"}}
{"timestamp":"2025-12-06T10:00:01Z","level":"info","eventType":"step.started","message":"Starting step: build","stepId":"build"}
{"timestamp":"2025-12-06T10:02:00Z","level":"info","eventType":"step.completed","message":"Step completed: build","stepId":"build","metadata":{"duration":"119s"}}
{"timestamp":"2025-12-06T10:02:01Z","level":"warn","eventType":"gate.awaiting","message":"Awaiting approval: security-review","stepId":"approval"}
'404':
description: Run not found
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs/{runId}/artifacts:
get:
summary: List run artifacts
description: |
Returns a list of artifacts captured during the run, including file outputs,
evidence bundles, and expression-evaluated results.
operationId: listRunArtifacts
tags: [Artifacts]
parameters:
- $ref: '#/components/parameters/Tenant'
- $ref: '#/components/parameters/RunId'
responses:
'200':
description: Artifact list
content:
application/json:
schema:
type: array
items:
$ref: '#/components/schemas/RunArtifact'
examples:
artifacts:
value:
- name: scan-report
type: file
sourcePath: "/output/scan-report.json"
storedPath: "runs/run-20251206-001/artifacts/scan-report.json"
status: captured
capturedAt: "2025-12-06T10:02:00Z"
- name: evidence-bundle
type: object
status: captured
capturedAt: "2025-12-06T10:02:00Z"
expressionJson: '{"sha256":"abc123...","attestations":[...]}'
'404':
description: Run not found
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs/{runId}/approvals/{approvalId}:
post:
summary: Apply approval decision
description: |
Applies an approval decision (approved, rejected, or expired) to a pending
approval gate. The planHash must match to prevent approving a stale plan.
If approved, the run will resume execution. If rejected, the run will fail
at the gate step.
operationId: applyApprovalDecision
tags: [Approvals]
parameters:
- $ref: '#/components/parameters/Tenant'
- $ref: '#/components/parameters/RunId'
- $ref: '#/components/parameters/ApprovalId'
requestBody:
required: true
content:
application/json:
schema:
$ref: '#/components/schemas/ApprovalDecisionRequest'
examples:
approve:
summary: Approve the gate
value:
decision: approved
planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
actorId: "user:alice@example.com"
summary: "Reviewed and approved for production deployment"
reject:
summary: Reject the gate
value:
decision: rejected
planHash: "sha256:a1b2c3d4e5f678901234567890abcdef1234567890abcdef1234567890abcdef"
actorId: "user:bob@example.com"
summary: "Security scan found critical vulnerabilities"
responses:
'200':
description: Decision applied
content:
application/json:
schema:
$ref: '#/components/schemas/ApprovalDecisionResponse'
examples:
approved:
value:
status: approved
resumed: true
'400':
description: Invalid decision or planHash format
content:
application/json:
schema:
$ref: '#/components/schemas/ErrorEnvelope'
'404':
description: Run or approval not found
'409':
description: Plan hash mismatch
content:
application/json:
schema:
$ref: '#/components/schemas/ErrorEnvelope'
default:
$ref: '#/components/responses/Error'
/v1/task-runner/runs/{runId}/cancel:
post:
summary: Cancel a run
description: |
Requests cancellation of a run. Remaining pending steps will be marked as
skipped. Steps that have already succeeded or been skipped are not affected.
operationId: cancelRun
tags: [Runs]
parameters:
- $ref: '#/components/parameters/Tenant'
- $ref: '#/components/parameters/RunId'
responses:
'202':
description: Cancellation accepted
headers:
Location:
description: URL of the run
schema:
type: string
content:
application/json:
schema:
type: object
properties:
status:
type: string
enum: [cancelled]
'404':
description: Run not found
default:
$ref: '#/components/responses/Error'
/.well-known/openapi:
get:
summary: Get OpenAPI metadata
description: |
Returns metadata about the OpenAPI specification including the spec URL,
ETag for caching, and a signature for verification.
operationId: getOpenApiMetadata
tags: [Metadata]
responses:
'200':
description: OpenAPI metadata
headers:
ETag:
description: Spec version ETag
schema:
type: string
X-Signature:
description: Spec signature for verification
schema:
type: string
content:
application/json:
schema:
$ref: '#/components/schemas/OpenApiMetadata'
examples:
metadata:
value:
specUrl: "/openapi"
version: "0.1.0-draft"
buildVersion: "20251206.1"
etag: '"abc123"'
signature: "sha256:def456..."
components:
securitySchemes:
oauth2:
type: oauth2
flows:
clientCredentials:
tokenUrl: https://auth.stellaops.example.com/oauth/token
scopes:
taskrunner.viewer: Read-only access to runs and logs
taskrunner.operator: Create runs and apply approvals
taskrunner.admin: Full administrative access
parameters:
Tenant:
name: X-StellaOps-Tenant
in: header
required: false
description: Tenant slug (optional for single-tenant deployments)
schema:
type: string
RunId:
name: runId
in: path
required: true
description: Unique run identifier
schema:
type: string
pattern: '^[a-zA-Z0-9_-]+$'
ApprovalId:
name: approvalId
in: path
required: true
description: Approval gate identifier (from task pack approvals section)
schema:
type: string
responses:
Error:
description: Standard error envelope
content:
application/json:
schema:
$ref: '#/components/schemas/ErrorEnvelope'
examples:
internal-error:
value:
error:
code: internal_error
message: "An unexpected error occurred"
traceId: "f62f3c2b9c8e4c53"
schemas:
ErrorEnvelope:
type: object
required: [error]
properties:
error:
type: object
required: [code, message]
properties:
code:
type: string
description: Machine-readable error code
message:
type: string
description: Human-readable error message
traceId:
type: string
description: Trace ID for debugging
SimulationRequest:
type: object
required: [manifest]
properties:
manifest:
type: string
description: Task pack manifest in YAML format
inputs:
type: object
additionalProperties: true
description: Input values to provide to the task pack
SimulationResponse:
type: object
required: [planHash, failurePolicy, steps, outputs, hasPendingApprovals]
properties:
planHash:
type: string
description: SHA-256 hash of the execution plan
pattern: '^sha256:[a-f0-9]{64}$'
failurePolicy:
$ref: '#/components/schemas/FailurePolicy'
steps:
type: array
items:
$ref: '#/components/schemas/SimulationStep'
outputs:
type: array
items:
$ref: '#/components/schemas/SimulationOutput'
hasPendingApprovals:
type: boolean
description: Whether the plan contains approval gates
SimulationStep:
type: object
required: [id, templateId, kind, enabled, status, children]
properties:
id:
type: string
templateId:
type: string
kind:
type: string
enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
enabled:
type: boolean
status:
type: string
enum: [Pending, Skipped, RequiresApproval, RequiresPolicy, WillIterate, WillBranch]
statusReason:
type: string
uses:
type: string
description: Executor reference for run steps
approvalId:
type: string
gateMessage:
type: string
maxParallel:
type: integer
continueOnError:
type: boolean
children:
type: array
items:
$ref: '#/components/schemas/SimulationStep'
loopInfo:
$ref: '#/components/schemas/LoopInfo'
conditionalInfo:
$ref: '#/components/schemas/ConditionalInfo'
policyInfo:
$ref: '#/components/schemas/PolicyInfo'
LoopInfo:
type: object
description: Loop step simulation details
properties:
itemsExpression:
type: string
iterator:
type: string
index:
type: string
maxIterations:
type: integer
aggregationMode:
type: string
enum: [collect, merge, last, first, none]
ConditionalInfo:
type: object
description: Conditional step simulation details
properties:
branches:
type: array
items:
type: object
properties:
condition:
type: string
stepCount:
type: integer
elseStepCount:
type: integer
outputUnion:
type: boolean
PolicyInfo:
type: object
description: Policy gate simulation details
properties:
policyId:
type: string
policyVersion:
type: string
failureAction:
type: string
enum: [abort, warn, requestOverride, branch]
retryCount:
type: integer
SimulationOutput:
type: object
required: [name, type, requiresRuntimeValue]
properties:
name:
type: string
type:
type: string
requiresRuntimeValue:
type: boolean
pathExpression:
type: string
valueExpression:
type: string
CreateRunRequest:
type: object
required: [manifest]
properties:
runId:
type: string
description: Optional custom run ID (auto-generated if not provided)
manifest:
type: string
description: Task pack manifest in YAML format
inputs:
type: object
additionalProperties: true
description: Input values to provide to the task pack
tenantId:
type: string
description: Tenant identifier
RunStateResponse:
type: object
required: [runId, planHash, failurePolicy, createdAt, updatedAt, steps]
properties:
runId:
type: string
planHash:
type: string
pattern: '^sha256:[a-f0-9]{64}$'
failurePolicy:
$ref: '#/components/schemas/FailurePolicy'
createdAt:
type: string
format: date-time
updatedAt:
type: string
format: date-time
steps:
type: array
items:
$ref: '#/components/schemas/RunStateStep'
RunStateStep:
type: object
required: [stepId, kind, enabled, continueOnError, status, attempts]
properties:
stepId:
type: string
kind:
type: string
enum: [Run, GateApproval, GatePolicy, Parallel, Map, Loop, Conditional, Unknown]
enabled:
type: boolean
continueOnError:
type: boolean
maxParallel:
type: integer
approvalId:
type: string
gateMessage:
type: string
status:
type: string
enum: [Pending, Running, Succeeded, Failed, Skipped]
attempts:
type: integer
lastTransitionAt:
type: string
format: date-time
nextAttemptAt:
type: string
format: date-time
statusReason:
type: string
FailurePolicy:
type: object
required: [maxAttempts, backoffSeconds, continueOnError]
properties:
maxAttempts:
type: integer
minimum: 1
backoffSeconds:
type: integer
minimum: 0
continueOnError:
type: boolean
RunLogEntry:
type: object
required: [timestamp, level, eventType, message]
description: |
Log entry returned in NDJSON stream. Each entry is a single JSON object
followed by a newline character.
properties:
timestamp:
type: string
format: date-time
level:
type: string
enum: [debug, info, warn, error]
eventType:
type: string
description: |
Event type identifier, e.g.:
- run.created, run.started, run.completed, run.failed, run.cancelled
- step.started, step.completed, step.failed, step.skipped
- gate.awaiting, gate.approved, gate.rejected
- run.schedule-failed, run.cancel-requested
message:
type: string
stepId:
type: string
metadata:
type: object
additionalProperties:
type: string
RunArtifact:
type: object
required: [name, type, status]
properties:
name:
type: string
type:
type: string
enum: [file, object]
sourcePath:
type: string
storedPath:
type: string
status:
type: string
enum: [pending, captured, failed]
notes:
type: string
capturedAt:
type: string
format: date-time
expressionJson:
type: string
description: JSON string of evaluated expression result for object outputs
ApprovalDecisionRequest:
type: object
required: [decision, planHash]
properties:
decision:
type: string
enum: [approved, rejected, expired]
planHash:
type: string
pattern: '^sha256:[a-f0-9]{64}$'
description: Plan hash to verify against (must match current run plan)
actorId:
type: string
description: Identifier of the approver (e.g., user:alice@example.com)
summary:
type: string
description: Optional comment explaining the decision
ApprovalDecisionResponse:
type: object
required: [status, resumed]
properties:
status:
type: string
enum: [approved, rejected, expired]
resumed:
type: boolean
description: Whether the run was resumed (true for approved decisions)
PlanErrorResponse:
type: object
required: [errors]
properties:
errors:
type: array
items:
type: object
required: [path, message]
properties:
path:
type: string
description: JSON path to the error location
message:
type: string
OpenApiMetadata:
type: object
required: [specUrl, version, etag]
properties:
specUrl:
type: string
description: URL to fetch the full OpenAPI spec
version:
type: string
description: API version
buildVersion:
type: string
description: Build version identifier
etag:
type: string
description: ETag for caching
signature:
type: string
description: Signature for spec verification
tags:
- name: Simulations
description: Task pack simulation without execution
- name: Runs
description: Pack run lifecycle management
- name: Logs
description: Run log streaming
- name: Artifacts
description: Run artifact management
- name: Approvals
description: Approval gate decisions
- name: Metadata
description: Service metadata and discovery

View File

@@ -0,0 +1,369 @@
# Authority Crypto Provider Contract
> **Status:** APPROVED
> **Version:** 1.0.0
> **Last Updated:** 2025-12-06
> **Owner:** Authority Core Guild
> **Unblocks:** AUTH-CRYPTO-90-001, SEC-CRYPTO-90-014, SCANNER-CRYPTO-90-001, ATTESTOR-CRYPTO-90-001
## Overview
This contract defines the Authority signing provider interface for StellaOps, enabling pluggable cryptographic backends including:
- **Software keys** (default) — ECDSA P-256/P-384, RSA, EdDSA
- **HSM integration** — PKCS#11, Cloud KMS (AWS, GCP, Azure)
- **Regional compliance** — CryptoPro GOST (RU), SM2/SM3 (CN), eIDAS (EU), FIPS 140-2
## Architecture
```
┌─────────────────────────────────────────────────────────────────────────────┐
│ Authority Crypto Provider │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ ┌─────────────────────────────────────────────────────────────────────────┐│
│ │ ISigningProvider Interface ││
│ │ ││
│ │ + Sign(data: byte[], keyId: string) → SignatureResult ││
│ │ + Verify(data: byte[], signature: byte[], keyId: string) → bool ││
│ │ + GetPublicKey(keyId: string) → PublicKeyInfo ││
│ │ + ListKeys(filter: KeyFilter) → KeyInfo[] ││
│ │ + CreateKey(spec: KeySpec) → KeyInfo ││
│ │ + RotateKey(keyId: string) → KeyInfo ││
│ │ + ExportJWKS(keyIds: string[]) → JWKS ││
│ └─────────────────────────────────────────────────────────────────────────┘│
│ │ │
│ ┌────────────────────┼────────────────────┐ │
│ ▼ ▼ ▼ │
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
│ │ Software │ │ PKCS#11 │ │ Cloud KMS │ │
│ │ Provider │ │ Provider │ │ Provider │ │
│ │ │ │ │ │ │ │
│ │ • File keys │ │ • HSM │ │ • AWS KMS │ │
│ │ • Memory │ │ • SmartCard │ │ • GCP KMS │ │
│ │ • Vault │ │ • CryptoPro │ │ • Azure KV │ │
│ └──────────────┘ └──────────────┘ └──────────────┘ │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
```
## 1. ISigningProvider Interface
### 1.1 Core Methods
```csharp
/// <summary>
/// Pluggable cryptographic signing provider for Authority service.
/// </summary>
public interface ISigningProvider
{
/// <summary>Provider identifier (e.g., "software", "pkcs11", "aws-kms")</summary>
string ProviderId { get; }
/// <summary>Supported algorithms by this provider</summary>
IReadOnlyList<string> SupportedAlgorithms { get; }
/// <summary>Sign data with the specified key</summary>
Task<SignatureResult> SignAsync(
byte[] data,
string keyId,
SigningOptions? options = null,
CancellationToken ct = default);
/// <summary>Verify a signature</summary>
Task<bool> VerifyAsync(
byte[] data,
byte[] signature,
string keyId,
CancellationToken ct = default);
/// <summary>Get public key information</summary>
Task<PublicKeyInfo> GetPublicKeyAsync(
string keyId,
CancellationToken ct = default);
/// <summary>List available keys</summary>
Task<IReadOnlyList<KeyInfo>> ListKeysAsync(
KeyFilter? filter = null,
CancellationToken ct = default);
/// <summary>Create a new key pair</summary>
Task<KeyInfo> CreateKeyAsync(
KeySpec spec,
CancellationToken ct = default);
/// <summary>Rotate a key (create new version)</summary>
Task<KeyInfo> RotateKeyAsync(
string keyId,
CancellationToken ct = default);
/// <summary>Export keys as JWKS for distributed verification</summary>
Task<JsonWebKeySet> ExportJwksAsync(
IEnumerable<string>? keyIds = null,
CancellationToken ct = default);
/// <summary>Import a public key for verification</summary>
Task<KeyInfo> ImportPublicKeyAsync(
byte[] keyData,
string format,
KeyMetadata? metadata = null,
CancellationToken ct = default);
}
```
### 1.2 Supporting Types
```csharp
public record SignatureResult(
byte[] Signature,
string Algorithm,
string KeyId,
string? KeyVersion,
DateTimeOffset Timestamp);
public record SigningOptions(
string? Algorithm = null,
bool IncludeTimestamp = true,
string? Nonce = null);
public record PublicKeyInfo(
string KeyId,
string Algorithm,
byte[] PublicKey,
string Format, // "PEM", "DER", "JWK"
string? Fingerprint,
DateTimeOffset? ExpiresAt);
public record KeyInfo(
string KeyId,
string Algorithm,
KeyState State,
DateTimeOffset CreatedAt,
DateTimeOffset? ExpiresAt,
string? CurrentVersion,
IReadOnlyDictionary<string, string>? Metadata);
public enum KeyState
{
Active,
Disabled,
PendingDeletion,
Deleted
}
public record KeySpec(
string Algorithm,
int? KeySize = null,
string? Purpose = null, // "signing", "attestation", "authority"
IReadOnlyDictionary<string, string>? Metadata = null,
DateTimeOffset? ExpiresAt = null);
public record KeyFilter(
string? Purpose = null,
KeyState? State = null,
string? Algorithm = null);
```
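For orientation, a minimal usage sketch of the interface above — the key ID and algorithm are illustrative, and error handling is omitted:
```csharp
// Sketch: sign an SBOM digest and verify the result with the same provider.
// Assumes an ISigningProvider has been resolved from DI (see section 4.1)
// and that "attestation-key-001" already exists (illustrative key ID).
public static async Task<bool> SignAndVerifyAsync(
    ISigningProvider provider,
    byte[] sbomDigest,
    CancellationToken ct = default)
{
    const string keyId = "attestation-key-001";

    SignatureResult result = await provider.SignAsync(
        sbomDigest,
        keyId,
        new SigningOptions(Algorithm: "ES256"),
        ct);

    // Round-trip verification against the same key.
    return await provider.VerifyAsync(sbomDigest, result.Signature, keyId, ct);
}
```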
## 2. Supported Algorithms
### 2.1 Algorithm Registry
| Algorithm | OID | Key Size | Compliance | Provider Support |
|-----------|-----|----------|------------|------------------|
| **ES256** | 1.2.840.10045.4.3.2 | P-256 | FIPS, eIDAS | All |
| **ES384** | 1.2.840.10045.4.3.3 | P-384 | FIPS, eIDAS | All |
| **RS256** | 1.2.840.113549.1.1.11 | 2048+ | FIPS, eIDAS | All |
| **RS384** | 1.2.840.113549.1.1.12 | 2048+ | FIPS, eIDAS | All |
| **EdDSA** | 1.3.101.112 | Ed25519 | — | Software, some HSM |
| **PS256** | 1.2.840.113549.1.1.10 | 2048+ | FIPS | All |
| **GOST R 34.10-2012** | 1.2.643.7.1.1.1.1 | 256/512 | RU | PKCS#11 (CryptoPro) |
| **SM2** | 1.2.156.10197.1.301 | 256 | CN | PKCS#11 |
### 2.2 Default Configuration
```yaml
# etc/authority.yaml
crypto:
provider: software # or: pkcs11, aws-kms, gcp-kms, azure-keyvault
software:
keys_path: /var/lib/stellaops/keys
default_algorithm: ES256
pkcs11:
library_path: /usr/lib/libpkcs11.so
slot_id: 0
pin_env: AUTHORITY_HSM_PIN
# For CryptoPro:
# library_path: /opt/cprocsp/lib/amd64/libcapi20.so
aws_kms:
region: us-east-1
key_alias_prefix: stellaops/
azure_keyvault:
vault_url: https://stellaops.vault.azure.net/
gcp_kms:
project: stellaops-prod
location: global
key_ring: attestation-keys
# Regional compliance overrides
compliance:
ru:
provider: pkcs11
algorithms: [GOST-R-34.10-2012-256, GOST-R-34.10-2012-512]
library_path: /opt/cprocsp/lib/amd64/libcapi20.so
cn:
provider: pkcs11
algorithms: [SM2]
```
## 3. JWKS Export Requirements
### 3.1 JWKS Endpoint
The Authority service MUST expose a JWKS endpoint for distributed verification:
```
GET /.well-known/jwks.json
```
Response format:
```json
{
"keys": [
{
"kty": "EC",
"crv": "P-256",
"x": "base64url-encoded-x",
"y": "base64url-encoded-y",
"kid": "attestation-key-001",
"alg": "ES256",
"use": "sig",
"key_ops": ["verify"],
"x5t#S256": "sha256-fingerprint"
}
]
}
```
### 3.2 Key Rotation
When keys are rotated (a sketch of the resulting JWKS assembly follows this list):
1. New key becomes `Active`, old key becomes `Disabled` (verification-only)
2. JWKS includes both keys during transition period
3. Old key removed after `rotation_grace_period` (default: 7 days)
4. All consuming services refresh JWKS on schedule or via webhook
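A minimal sketch of assembling the JWKS during the transition window. It assumes that removal after the grace period (step 3) happens via key deletion, so any remaining `Disabled` key is still within its verification window:
```csharp
// Sketch: export a JWKS containing Active keys plus Disabled keys kept
// for verification-only during the rotation grace period. Pruning of
// Disabled keys past the grace period is assumed to happen elsewhere
// via key deletion (step 3 above).
public static async Task<JsonWebKeySet> BuildRotationJwksAsync(
    ISigningProvider provider,
    CancellationToken ct = default)
{
    var keys = await provider.ListKeysAsync(filter: null, ct);

    var eligible = keys
        .Where(k => k.State is KeyState.Active or KeyState.Disabled)
        .Select(k => k.KeyId);

    return await provider.ExportJwksAsync(eligible, ct);
}
```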
### 3.3 Key Discovery Flow
```
┌──────────┐ ┌──────────┐ ┌──────────┐
│ Scanner │ │ Authority │ │ Attestor │
└────┬─────┘ └────┬─────┘ └────┬─────┘
│ │ │
│ GET /jwks.json│ │
│───────────────>│ │
│<───────────────│ │
│ JWKS │ │
│ │ │
│ Sign(SBOM) │ │
│───────────────>│ │
│<───────────────│ │
│ Signature │ │
│ │ │
│ │ GET /jwks.json │
│ │<────────────────│
│ │────────────────>│
│ │ JWKS │
│ │ │
│ │ Verify(SBOM) │
│ │<────────────────│
│ │ ✓ Valid │
```
## 4. Provider Registration
### 4.1 Service Registration
```csharp
// Program.cs
services.AddAuthoritySigningProvider(options =>
{
options.Provider = configuration["Crypto:Provider"];
options.Configuration = configuration.GetSection("Crypto");
});
// Extension method
public static IServiceCollection AddAuthoritySigningProvider(
this IServiceCollection services,
Action<CryptoProviderOptions> configure)
{
var options = new CryptoProviderOptions();
configure(options);
return options.Provider switch
{
"software" => services.AddSingleton<ISigningProvider, SoftwareSigningProvider>(),
"pkcs11" => services.AddSingleton<ISigningProvider, Pkcs11SigningProvider>(),
"aws-kms" => services.AddSingleton<ISigningProvider, AwsKmsSigningProvider>(),
"gcp-kms" => services.AddSingleton<ISigningProvider, GcpKmsSigningProvider>(),
"azure-keyvault" => services.AddSingleton<ISigningProvider, AzureKeyVaultSigningProvider>(),
_ => throw new ArgumentException($"Unknown provider: {options.Provider}")
};
}
```
### 4.2 Regional Provider Registry
For multi-region deployments with compliance requirements:
```yaml
# Regional key registry
key_registry:
attestation-sbom:
default:
key_id: "stellaops/attestation-sbom-001"
algorithm: ES256
provider: aws-kms
ru:
key_id: "ru/attestation-sbom-gost"
algorithm: GOST-R-34.10-2012-256
provider: pkcs11
cn:
key_id: "cn/attestation-sbom-sm2"
algorithm: SM2
provider: pkcs11
```
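A sketch of how a host might resolve keys from this registry — `KeyRegistryEntry` and the dictionary shape mirror the YAML above and are illustrative, not part of the contract interface:
```csharp
// Sketch: resolve a signing key for a purpose, preferring a
// region-specific entry and falling back to "default".
public sealed record KeyRegistryEntry(string KeyId, string Algorithm, string Provider);

public static KeyRegistryEntry ResolveKey(
    IReadOnlyDictionary<string, IReadOnlyDictionary<string, KeyRegistryEntry>> registry,
    string purpose,   // e.g. "attestation-sbom"
    string? region)   // e.g. "ru", "cn", or null for the default entry
{
    var entries = registry[purpose];
    return region is not null && entries.TryGetValue(region, out var regional)
        ? regional
        : entries["default"];
}
```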
## 5. Error Codes
| Code | Name | Description |
|------|------|-------------|
| `CRYPTO_001` | `KEY_NOT_FOUND` | Requested key does not exist |
| `CRYPTO_002` | `KEY_DISABLED` | Key is disabled and cannot sign |
| `CRYPTO_003` | `ALGORITHM_UNSUPPORTED` | Algorithm not supported by provider |
| `CRYPTO_004` | `HSM_UNAVAILABLE` | HSM/PKCS#11 device not available |
| `CRYPTO_005` | `SIGNATURE_FAILED` | Signing operation failed |
| `CRYPTO_006` | `VERIFICATION_FAILED` | Signature verification failed |
| `CRYPTO_007` | `KEY_EXPIRED` | Key has expired |
| `CRYPTO_008` | `COMPLIANCE_VIOLATION` | Algorithm not allowed by compliance profile |
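One way providers could surface these codes to callers — the exception type is a sketch; the contract mandates only the codes in the table above:
```csharp
// Sketch: typed error surface for the error codes above.
public enum CryptoErrorCode
{
    KeyNotFound,            // CRYPTO_001
    KeyDisabled,            // CRYPTO_002
    AlgorithmUnsupported,   // CRYPTO_003
    HsmUnavailable,         // CRYPTO_004
    SignatureFailed,        // CRYPTO_005
    VerificationFailed,     // CRYPTO_006
    KeyExpired,             // CRYPTO_007
    ComplianceViolation     // CRYPTO_008
}

public sealed class CryptoProviderException : Exception
{
    public CryptoErrorCode Code { get; }

    public CryptoProviderException(CryptoErrorCode code, string message)
        : base(message) => Code = code;
}
```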
## 6. Tasks Unblocked
This contract unblocks:
| Task ID | Description | Status |
|---------|-------------|--------|
| AUTH-CRYPTO-90-001 | Authority signing provider contract | ✅ UNBLOCKED |
| SEC-CRYPTO-90-014 | Security Guild crypto integration | ✅ UNBLOCKED |
| SCANNER-CRYPTO-90-001 | Scanner SBOM signing | ✅ UNBLOCKED |
| ATTESTOR-CRYPTO-90-001 | Attestor DSSE signing | ✅ UNBLOCKED |
## 7. Changelog
| Date | Version | Change |
|------|---------|--------|
| 2025-12-06 | 1.0.0 | Initial contract with interface, algorithms, JWKS, regional support |

View File

@@ -0,0 +1,425 @@
# Sealed Install Enforcement Contract
> **Status:** APPROVED
> **Version:** 1.0.0
> **Last Updated:** 2025-12-06
> **Owner:** AirGap Controller Guild
> **Unblocks:** TASKRUN-AIRGAP-57-001, TASKRUN-AIRGAP-58-001
## Overview
This contract defines the sealed install enforcement semantics for StellaOps air-gapped deployments. When a pack or task declares `sealed_install: true`, the Task Runner MUST refuse to execute if the environment is not properly sealed.
## Architecture
```
┌─────────────────────────────────────────────────────────────────────────────┐
│ Sealed Install Enforcement Flow │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │
│ │ Task Pack │ │ Task Runner │ │ AirGap │ │
│ │ │────>│ │────>│ Controller │ │
│ │ sealed_ │ │ Enforcement │ │ │ │
│ │ install:true │ │ Check │ │ /status │ │
│ └──────────────┘ └──────────────┘ └──────────────┘ │
│ │ │ │
│ ▼ ▼ │
│ ┌──────────────────────────────────┐ │
│ │ Decision Matrix │ │
│ │ │ │
│ │ Pack: sealed Env: sealed │ │
│ │ ────────────── ──────────── │ │
│ │ true true → RUN │ │
│ │ true false → DENY │ │
│ │ false true → RUN │ │
│ │ false false → RUN │ │
│ └──────────────────────────────────┘ │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
```
## 1. Pack Declaration
### 1.1 Sealed Install Flag
Packs declare their sealed requirement in the pack manifest:
```json
{
"pack_id": "compliance-scan-airgap",
"version": "1.0.0",
"name": "Air-Gap Compliance Scanner",
"sealed_install": true,
"sealed_requirements": {
"min_bundle_version": "2025.10.0",
"max_advisory_staleness_hours": 168,
"require_time_anchor": true,
"allowed_offline_duration_hours": 720
}
}
```
### 1.2 Sealed Requirements Schema
```json
{
"type": "object",
"properties": {
"sealed_install": {
"type": "boolean",
"default": false,
"description": "If true, pack MUST run in sealed environment"
},
"sealed_requirements": {
"type": "object",
"properties": {
"min_bundle_version": {
"type": "string",
"description": "Minimum air-gap bundle version"
},
"max_advisory_staleness_hours": {
"type": "integer",
"minimum": 1,
"default": 168,
"description": "Maximum age of advisory data in hours"
},
"require_time_anchor": {
"type": "boolean",
"default": true,
"description": "Require valid time anchor"
},
"allowed_offline_duration_hours": {
"type": "integer",
"minimum": 1,
"default": 720,
"description": "Maximum allowed offline duration"
},
"require_signature_verification": {
"type": "boolean",
"default": true,
"description": "Require bundle signature verification"
}
}
}
}
}
```
## 2. Environment Detection
### 2.1 Sealed Mode Status API
The Task Runner queries the AirGap Controller to determine sealed status:
```
GET /api/v1/airgap/status
```
Response:
```json
{
"sealed": true,
"mode": "sealed",
"sealed_at": "2025-12-01T00:00:00Z",
"sealed_by": "ops-admin@company.com",
"bundle_version": "2025.10.0",
"bundle_digest": "sha256:abc123...",
"last_advisory_update": "2025-12-01T00:00:00Z",
"advisory_staleness_hours": 120,
"time_anchor": {
"timestamp": "2025-12-01T00:00:00Z",
"signature": "base64...",
"valid": true,
"expires_at": "2025-12-31T00:00:00Z"
},
"egress_blocked": true,
"network_policy": "deny-all"
}
```
### 2.2 Detection Heuristics
If the AirGap Controller is unavailable, the Task Runner uses fallback heuristics:
| Heuristic | Weight | Indicates |
|-----------|--------|-----------|
| No external DNS resolution | High | Sealed |
| Blocked ports 80, 443 | High | Sealed |
| AIRGAP_MODE=sealed env var | High | Sealed |
| /etc/stellaops/sealed file exists | Medium | Sealed |
| No internet connectivity | Medium | Sealed |
| Local-only registry configured | Low | Sealed |
The environment is considered sealed when the combined weighted heuristic score reaches **0.7** or higher.
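A sketch of one way to combine the heuristics — the High/Medium/Low labels come from the table, while the numeric weights (1.0/0.6/0.3) are assumptions of this sketch:
```csharp
// Sketch: combine fallback heuristics into a sealed/not-sealed decision.
// Mapping High/Medium/Low to 1.0/0.6/0.3 is an assumption; the contract
// only fixes the 0.7 threshold.
public enum HeuristicWeight { High, Medium, Low }

public static bool IsLikelySealed(
    IReadOnlyList<(bool Detected, HeuristicWeight Weight)> heuristics)
{
    const double threshold = 0.7;

    double total = 0, detected = 0;
    foreach (var (hit, weight) in heuristics)
    {
        double w = weight switch
        {
            HeuristicWeight.High => 1.0,
            HeuristicWeight.Medium => 0.6,
            _ => 0.3
        };
        total += w;
        if (hit) detected += w;
    }

    // Normalized weighted score; empty input is treated as not sealed.
    return total > 0 && detected / total >= threshold;
}
```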
## 3. Enforcement Logic
### 3.1 Pre-Execution Check
```csharp
public sealed class SealedInstallEnforcer
{
public async Task<EnforcementResult> EnforceAsync(
TaskPack pack,
CancellationToken ct = default)
{
// If pack doesn't require sealed install, allow
if (!pack.SealedInstall)
{
return EnforcementResult.Allowed("Pack does not require sealed install");
}
// Get environment sealed status
var status = await _airgapController.GetStatusAsync(ct);
// Core check: environment must be sealed
if (!status.Sealed)
{
return EnforcementResult.Denied(
"SEALED_INSTALL_VIOLATION",
"Pack requires sealed environment but environment is not sealed",
new SealedInstallViolation
{
PackId = pack.PackId,
RequiredSealed = true,
ActualSealed = false,
Recommendation = "Activate sealed mode with: stella airgap seal"
});
}
// Check sealed requirements
if (pack.SealedRequirements != null)
{
var violations = ValidateRequirements(pack.SealedRequirements, status);
if (violations.Any())
{
return EnforcementResult.Denied(
"SEALED_REQUIREMENTS_VIOLATION",
"Sealed requirements not met",
violations);
}
}
return EnforcementResult.Allowed("Sealed install requirements satisfied");
}
private List<RequirementViolation> ValidateRequirements(
SealedRequirements requirements,
SealedModeStatus status)
{
var violations = new List<RequirementViolation>();
// Bundle version check
if (requirements.MinBundleVersion != null)
{
if (Version.Parse(status.BundleVersion) < Version.Parse(requirements.MinBundleVersion))
{
violations.Add(new RequirementViolation
{
Requirement = "min_bundle_version",
Expected = requirements.MinBundleVersion,
Actual = status.BundleVersion,
Message = $"Bundle version {status.BundleVersion} < required {requirements.MinBundleVersion}"
});
}
}
// Advisory staleness check
if (status.AdvisoryStalenessHours > requirements.MaxAdvisoryStalenessHours)
{
violations.Add(new RequirementViolation
{
Requirement = "max_advisory_staleness_hours",
Expected = requirements.MaxAdvisoryStalenessHours.ToString(),
Actual = status.AdvisoryStalenessHours.ToString(),
Message = $"Advisory data is {status.AdvisoryStalenessHours}h old, max allowed is {requirements.MaxAdvisoryStalenessHours}h"
});
}
// Time anchor check
if (requirements.RequireTimeAnchor && (status.TimeAnchor == null || !status.TimeAnchor.Valid))
{
violations.Add(new RequirementViolation
{
Requirement = "require_time_anchor",
Expected = "valid time anchor",
Actual = status.TimeAnchor?.Valid.ToString() ?? "missing",
Message = "Valid time anchor required but not present"
});
}
return violations;
}
}
```
### 3.2 Decision Matrix
| Pack `sealed_install` | Environment Sealed | Bundle Valid | Advisories Fresh | Result |
|-----------------------|-------------------|--------------|------------------|--------|
| `true` | `true` | `true` | `true` | ✅ RUN |
| `true` | `true` | `true` | `false` | ⚠️ WARN + RUN (if within grace) |
| `true` | `true` | `false` | * | ❌ DENY |
| `true` | `false` | * | * | ❌ DENY |
| `false` | `true` | * | * | ✅ RUN |
| `false` | `false` | * | * | ✅ RUN |
### 3.3 Grace Period Handling
For advisory staleness, a grace period can be configured:
```yaml
# etc/taskrunner.yaml
enforcement:
sealed_install:
staleness_grace_period_hours: 24
staleness_warning_threshold_hours: 120
deny_on_staleness: true # or false for warn-only
```
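A sketch of how these settings could drive the staleness branch of the decision matrix — `AllowedWithWarning` and the two-argument `Denied` overload are illustrative; the contract defines only the `Allowed`/`Denied` results shown in section 3.1:
```csharp
// Sketch: staleness evaluation with grace period and warn-only mode,
// mirroring the YAML settings above ("WARN + RUN" row of the matrix).
public EnforcementResult EvaluateStaleness(
    int stalenessHours,
    int maxAllowedHours,
    int gracePeriodHours,
    bool denyOnStaleness)
{
    if (stalenessHours <= maxAllowedHours)
    {
        return EnforcementResult.Allowed("Advisory data is fresh");
    }

    var withinGrace = stalenessHours <= maxAllowedHours + gracePeriodHours;
    if (withinGrace || !denyOnStaleness)
    {
        return EnforcementResult.AllowedWithWarning(
            $"Advisory data is {stalenessHours}h old (max {maxAllowedHours}h); continuing within grace period");
    }

    return EnforcementResult.Denied(
        "ADVISORY_STALENESS_VIOLATION",
        $"Advisory data is {stalenessHours}h old, max allowed is {maxAllowedHours}h");
}
```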
## 4. Refusal Semantics
### 4.1 Error Response
When enforcement denies execution:
```json
{
"error": {
"code": "SEALED_INSTALL_VIOLATION",
"message": "Pack requires sealed environment but environment is not sealed",
"details": {
"pack_id": "compliance-scan-airgap",
"pack_version": "1.0.0",
"sealed_install_required": true,
"environment_sealed": false,
"violations": [],
"recommendation": "Activate sealed mode with: stella airgap seal"
}
},
"status": "rejected",
"rejected_at": "2025-12-06T10:00:00Z"
}
```
### 4.2 CLI Exit Codes
| Code | Name | Description |
|------|------|-------------|
| 40 | `SEALED_INSTALL_VIOLATION` | Pack requires sealed but environment is not |
| 41 | `BUNDLE_VERSION_VIOLATION` | Bundle version below minimum |
| 42 | `ADVISORY_STALENESS_VIOLATION` | Advisory data too stale |
| 43 | `TIME_ANCHOR_VIOLATION` | Time anchor missing or invalid |
| 44 | `SIGNATURE_VERIFICATION_VIOLATION` | Bundle signature verification failed |
### 4.3 Audit Logging
All enforcement decisions are logged:
```json
{
"event_type": "sealed_install_enforcement",
"timestamp": "2025-12-06T10:00:00Z",
"pack_id": "compliance-scan-airgap",
"pack_version": "1.0.0",
"decision": "denied",
"reason": "SEALED_INSTALL_VIOLATION",
"environment": {
"sealed": false,
"bundle_version": null,
"advisory_staleness_hours": null
},
"user": "task-runner-service",
"tenant_id": "550e8400-e29b-41d4-a716-446655440000"
}
```
## 5. Integration Points
### 5.1 Task Runner Integration
```csharp
// In TaskRunner execution pipeline
public async Task<TaskResult> ExecuteAsync(TaskPack pack, TaskContext context)
{
// Pre-execution enforcement
var enforcement = await _sealedInstallEnforcer.EnforceAsync(pack);
if (!enforcement.Allowed)
{
await _auditLogger.LogEnforcementDenialAsync(pack, enforcement);
return TaskResult.Rejected(enforcement);
}
// Continue with execution
return await _executor.ExecuteAsync(pack, context);
}
```
### 5.2 CLI Integration
```bash
# Check sealed status before running pack
$ stella pack run compliance-scan-airgap
Error: Sealed install violation
Pack 'compliance-scan-airgap' requires a sealed environment.
Current environment:
Sealed: false
To resolve:
1. Import an air-gap bundle: stella airgap import <bundle.tar.gz>
2. Activate sealed mode: stella airgap seal
3. Verify status: stella airgap status
Exit code: 40
```
## 6. Configuration
### 6.1 Task Runner Configuration
```yaml
# etc/taskrunner.yaml
enforcement:
sealed_install:
enabled: true
# Staleness handling
staleness_grace_period_hours: 24
staleness_warning_threshold_hours: 120
deny_on_staleness: true
# Fallback detection
use_heuristic_detection: true
heuristic_threshold: 0.7
# Logging
log_all_decisions: true
audit_retention_days: 365
```
### 6.2 Environment Variables
| Variable | Description | Default |
|----------|-------------|---------|
| `AIRGAP_MODE` | Force sealed mode detection | — |
| `AIRGAP_CONTROLLER_URL` | AirGap controller endpoint | `http://localhost:8080` |
| `SEALED_INSTALL_BYPASS` | Bypass enforcement (dev only) | `false` |
## 7. Tasks Unblocked
This contract unblocks:
| Task ID | Description | Status |
|---------|-------------|--------|
| TASKRUN-AIRGAP-57-001 | Sealed install enforcement contract | ✅ UNBLOCKED |
| TASKRUN-AIRGAP-58-001 | Sealed install CLI integration | ✅ UNBLOCKED |
## 8. Changelog
| Date | Version | Change |
|------|---------|--------|
| 2025-12-06 | 1.0.0 | Initial contract with enforcement logic, decision matrix, CLI integration |

View File

@@ -38,28 +38,28 @@
## Delivery Tracker
| # | Task ID | Status | Key dependency / next step | Owners | Task Definition |
| --- | --- | --- | --- | --- | --- |
-| P1 | PREP-SCANNER-BUN-001-DESIGN-DOC | TODO | Due TBD · Accountable: Bun Analyzer Guild | Bun Analyzer Guild | Document Bun analyzer design at `docs/modules/scanner/prep/bun-analyzer-design.md` covering lockfile schema, discovery heuristics, evidence model, and CLI/WebService contract. |
-| 1 | SCANNER-BUN-001 | TODO | Await P1 | Bun Analyzer Guild | Create project scaffold: `StellaOps.Scanner.Analyzers.Lang.Bun.csproj`, plugin manifest (`manifest.json`), and `BunAnalyzerPlugin` implementing `ILanguageAnalyzerPlugin`. |
-| 2 | SCANNER-BUN-002 | TODO | Depends on task 1 | Bun Analyzer Guild | Implement `BunProjectDiscoverer`: identify candidate roots by presence of `package.json` + (`bun.lock` \| `bun.lockb` \| `bunfig.toml` \| `node_modules/.bun/`). |
-| 3 | SCANNER-BUN-003 | TODO | Depends on task 2 | Bun Analyzer Guild | Implement `BunInputNormalizer`: classify each root as installed-path (node_modules exists) or lockfile-path (`bun.lock` only) or unsupported (`bun.lockb` only). |
-| 4 | SCANNER-BUN-004 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `bun.lockb` unsupported handler: emit remediation finding with migration command (`bun install --save-text-lockfile`). |
-| 5 | SCANNER-BUN-005 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `BunLockParser`: tolerant JSONC parser for `bun.lock` text format; extract (name, version, resolved, integrity) tuples. |
-| 6 | SCANNER-BUN-006 | TODO | Depends on task 5 | Bun Analyzer Guild | Implement dev/prod dependency filtering for lockfile path; expose `include_dev` configuration option. |
-| 7 | SCANNER-BUN-007 | TODO | Depends on task 3 | Bun Analyzer Guild | Implement `BunInstalledCollector`: traverse `node_modules/**/package.json` and `node_modules/.bun/**/package.json` with symlink-safe walker. |
-| 8 | SCANNER-BUN-008 | TODO | Depends on task 7 | Bun Analyzer Guild | Implement symlink safety: follow symlinks only within root prefix; maintain visited inode/realpath set; record logical + real paths for evidence. |
-| 9 | SCANNER-BUN-009 | TODO | Depends on task 7 | Bun Analyzer Guild | Extract package metadata from `package.json`: name, version, private flag; attach lockfile evidence (resolved, integrity) when available. |
-| 10 | SCANNER-BUN-010 | TODO | Depends on tasks 5, 9 | Bun Analyzer Guild | Implement `BunPackageNormalizer`: deduplicate by (name, version); accumulate occurrence paths for traceability; emit `LanguageComponentRecord`. |
-| 11 | SCANNER-BUN-011 | TODO | Depends on task 10 | Bun Analyzer Guild | PURL generation: emit `pkg:npm/<name>@<version>` with correct scoped-package encoding (`@scope/pkg` → `%40scope/pkg`). |
-| 12 | SCANNER-BUN-012 | TODO | Depends on task 10 | Bun Analyzer Guild | Evidence emission: attach `LanguageComponentEvidence` with kind (File/Metadata), source (`node_modules`/`bun.lock`), locator (path), and optional sha256. |
-| 13 | SCANNER-BUN-013 | TODO | Depends on task 12 | Bun Analyzer Guild | Assemble `BunLanguageAnalyzer` orchestrating discovery → input normalization → collection → normalization → emit via `LanguageComponentWriter`. |
-| 14 | SCANNER-BUN-014 | TODO | Depends on task 13 | Bun Analyzer Guild | Performance guards: implement max-files-per-root cap, max-symlink-depth limit, prefix pruning to avoid full image traversal. |
-| 15 | SCANNER-BUN-015 | TODO | Depends on task 13 | QA Guild | Create test project `StellaOps.Scanner.Analyzers.Lang.Bun.Tests` with golden fixture harness using `LanguageAnalyzerTestHarness.AssertDeterministicAsync`. |
-| 16 | SCANNER-BUN-016 | TODO | Depends on task 15 | QA Guild | Fixture: Standard Bun install (hoisted/default linker) with `node_modules` and `bun.lock`; verify installed inventory path. |
-| 17 | SCANNER-BUN-017 | TODO | Depends on task 15 | QA Guild | Fixture: Isolated linker install (`bun install --linker isolated`) with packages under `node_modules/.bun/`; verify `.bun/` traversal. |
-| 18 | SCANNER-BUN-018 | TODO | Depends on task 15 | QA Guild | Fixture: Lockfile-only image (no `node_modules`); verify lockfile inventory path and dev/prod filtering. |
-| 19 | SCANNER-BUN-019 | TODO | Depends on task 15 | QA Guild | Fixture: Binary lockfile only (`bun.lockb`); verify unsupported remediation message emitted. |
-| 20 | SCANNER-BUN-020 | TODO | Depends on task 15 | QA Guild | Fixture: Monorepo/workspaces with multiple `package.json` under single lock; verify workspace member handling. |
-| 21 | SCANNER-BUN-021 | TODO | Depends on task 15 | QA Guild | Fixture: Symlink corner cases (verify no traversal outside root, no infinite loops, both logical/real paths in evidence). |
+| P1 | PREP-SCANNER-BUN-001-DESIGN-DOC | DONE (2025-12-06) | Design doc at `docs/modules/scanner/prep/bun-analyzer-design.md` | Bun Analyzer Guild | Document Bun analyzer design at `docs/modules/scanner/prep/bun-analyzer-design.md` covering lockfile schema, discovery heuristics, evidence model, and CLI/WebService contract. |
+| 1 | SCANNER-BUN-001 | DONE (2025-12-06) | Scaffold at `StellaOps.Scanner.Analyzers.Lang.Bun` | Bun Analyzer Guild | Create project scaffold: `StellaOps.Scanner.Analyzers.Lang.Bun.csproj`, plugin manifest (`manifest.json`), and `BunAnalyzerPlugin` implementing `ILanguageAnalyzerPlugin`. |
+| 2 | SCANNER-BUN-002 | DONE (2025-12-06) | `BunProjectDiscoverer.cs` implemented | Bun Analyzer Guild | Implement `BunProjectDiscoverer`: identify candidate roots by presence of `package.json` + (`bun.lock` \| `bun.lockb` \| `bunfig.toml` \| `node_modules/.bun/`). |
+| 3 | SCANNER-BUN-003 | DONE (2025-12-06) | `BunInputNormalizer.cs` implemented | Bun Analyzer Guild | Implement `BunInputNormalizer`: classify each root as installed-path (node_modules exists) or lockfile-path (`bun.lock` only) or unsupported (`bun.lockb` only). |
+| 4 | SCANNER-BUN-004 | DONE (2025-12-06) | `EmitBinaryLockfileRemediation` in BunLanguageAnalyzer | Bun Analyzer Guild | Implement `bun.lockb` unsupported handler: emit remediation finding with migration command (`bun install --save-text-lockfile`). |
+| 5 | SCANNER-BUN-005 | DONE (2025-12-06) | `BunLockParser.cs` with JSONC support | Bun Analyzer Guild | Implement `BunLockParser`: tolerant JSONC parser for `bun.lock` text format; extract (name, version, resolved, integrity) tuples. |
+| 6 | SCANNER-BUN-006 | DONE (2025-12-06) | `IncludeDev` in BunInputClassification | Bun Analyzer Guild | Implement dev/prod dependency filtering for lockfile path; expose `include_dev` configuration option. |
+| 7 | SCANNER-BUN-007 | DONE (2025-12-06) | `BunInstalledCollector.cs` implemented | Bun Analyzer Guild | Implement `BunInstalledCollector`: traverse `node_modules/**/package.json` and `node_modules/.bun/**/package.json` with symlink-safe walker. |
+| 8 | SCANNER-BUN-008 | DONE (2025-12-06) | Symlink safety in BunInstalledCollector | Bun Analyzer Guild | Implement symlink safety: follow symlinks only within root prefix; maintain visited inode/realpath set; record logical + real paths for evidence. |
+| 9 | SCANNER-BUN-009 | DONE (2025-12-06) | `TryParsePackage` in BunInstalledCollector | Bun Analyzer Guild | Extract package metadata from `package.json`: name, version, private flag; attach lockfile evidence (resolved, integrity) when available. |
+| 10 | SCANNER-BUN-010 | DONE (2025-12-06) | `BunPackageNormalizer.cs` implemented | Bun Analyzer Guild | Implement `BunPackageNormalizer`: deduplicate by (name, version); accumulate occurrence paths for traceability; emit `LanguageComponentRecord`. |
+| 11 | SCANNER-BUN-011 | DONE (2025-12-06) | `BuildPurl` in BunPackage | Bun Analyzer Guild | PURL generation: emit `pkg:npm/<name>@<version>` with correct scoped-package encoding (`@scope/pkg` → `%40scope/pkg`). |
+| 12 | SCANNER-BUN-012 | DONE (2025-12-06) | `CreateEvidence` in BunPackage | Bun Analyzer Guild | Evidence emission: attach `LanguageComponentEvidence` with kind (File/Metadata), source (`node_modules`/`bun.lock`), locator (path), and optional sha256. |
+| 13 | SCANNER-BUN-013 | DONE (2025-12-06) | `BunLanguageAnalyzer.cs` orchestration complete | Bun Analyzer Guild | Assemble `BunLanguageAnalyzer` orchestrating discovery → input normalization → collection → normalization → emit via `LanguageComponentWriter`. |
+| 14 | SCANNER-BUN-014 | DONE (2025-12-06) | MaxFilesPerRoot/MaxSymlinkDepth guards in place | Bun Analyzer Guild | Performance guards: implement max-files-per-root cap, max-symlink-depth limit, prefix pruning to avoid full image traversal. |
+| 15 | SCANNER-BUN-015 | DONE (2025-12-06) | Test project with 6 test methods | QA Guild | Create test project `StellaOps.Scanner.Analyzers.Lang.Bun.Tests` with golden fixture harness using `LanguageAnalyzerTestHarness.AssertDeterministicAsync`. |
+| 16 | SCANNER-BUN-016 | DONE (2025-12-06) | `StandardInstallProducesDeterministicOutputAsync` test | QA Guild | Fixture: Standard Bun install (hoisted/default linker) with `node_modules` and `bun.lock`; verify installed inventory path. |
+| 17 | SCANNER-BUN-017 | DONE (2025-12-06) | `IsolatedLinkerInstallIsParsedAsync` test | QA Guild | Fixture: Isolated linker install (`bun install --linker isolated`) with packages under `node_modules/.bun/`; verify `.bun/` traversal. |
+| 18 | SCANNER-BUN-018 | DONE (2025-12-06) | `LockfileOnlyIsParsedAsync` test | QA Guild | Fixture: Lockfile-only image (no `node_modules`); verify lockfile inventory path and dev/prod filtering. |
+| 19 | SCANNER-BUN-019 | DONE (2025-12-06) | `BinaryLockfileEmitsRemediationAsync` test | QA Guild | Fixture: Binary lockfile only (`bun.lockb`); verify unsupported remediation message emitted. |
+| 20 | SCANNER-BUN-020 | DONE (2025-12-06) | `WorkspacesAreParsedAsync` test | QA Guild | Fixture: Monorepo/workspaces with multiple `package.json` under single lock; verify workspace member handling. |
+| 21 | SCANNER-BUN-021 | DONE (2025-12-06) | `SymlinkSafetyIsEnforcedAsync` test | QA Guild | Fixture: Symlink corner cases (verify no traversal outside root, no infinite loops, both logical/real paths in evidence). |
| 22 | SCANNER-BUN-022 | TODO | Depends on task 14 | CLI Guild | Implement `stellaops-cli bun inspect` verb: display Bun package inventory for local root or scan ID; wire into `CommandFactory`. |
| 23 | SCANNER-BUN-023 | TODO | Depends on task 22 | CLI Guild | Implement `stellaops-cli bun resolve` verb: resolve Bun packages by scan ID, digest, or image reference with JSON/table output. |
| 24 | SCANNER-BUN-024 | TODO | Depends on task 23 | CLI Guild | Add CLI unit tests for Bun verbs (`CommandFactoryTests`, JSON output assertions); update CLI help text and golden outputs. |
@@ -72,6 +72,7 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
+| 2025-12-06 | Completed P1 through 21 (Waves A–D): Created design doc at `docs/modules/scanner/prep/bun-analyzer-design.md`. Verified core analyzer implementation in `StellaOps.Scanner.Analyzers.Lang.Bun`: BunAnalyzerPlugin, BunLanguageAnalyzer, BunProjectDiscoverer, BunInputNormalizer, BunLockParser (JSONC with git/tarball/workspace source detection), BunInstalledCollector (symlink-safe), BunPackageNormalizer, BunPackage (PURL + evidence). Performance guards (MaxFilesPerRoot=50000, MaxSymlinkDepth=10) in place. Test project with 6 golden fixture tests. Build succeeds. | Implementer |
| 2025-12-05 | Sprint file created from product advisory; 29 tasks across 6 waves (A–F) covering core analyzer, testing, CLI/WebService/Worker integration, and docs. | Planning |
## Decisions & Risks

View File

@@ -25,9 +25,9 @@
| 2 | TASKRUN-AIRGAP-56-002 | DONE (2025-12-03) | Helper delivered; downstream AIRGAP-57/58 await controller/importer bundle specs. | Task Runner Guild · AirGap Importer Guild | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. |
| 3 | TASKRUN-AIRGAP-57-001 | BLOCKED (2025-11-30) | Depends on 56-002; awaiting sealed-install enforcement contract. | Task Runner Guild · AirGap Controller Guild | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. |
| 4 | TASKRUN-AIRGAP-58-001 | BLOCKED (2025-11-30) | Depends on 57-001. | Task Runner Guild · Evidence Locker Guild | Capture bundle import job transcripts, hashed inputs/outputs into portable evidence bundles. |
-| 5 | TASKRUN-42-001 | TODO | ✅ Control-flow contract at `docs/schemas/taskpack-control-flow.schema.json`; proceed with execution engine upgrades (loops/conditionals/maxParallel), simulation mode, policy gate integration, deterministic failure recovery. | Task Runner Guild (`src/TaskRunner/StellaOps.TaskRunner`) | Execution engine enhancements + simulation API/CLI. |
-| 6 | TASKRUN-OAS-61-001 | TODO | ✅ Control-flow contract published 2025-12-06; proceed with OAS freeze. | Task Runner Guild · API Contracts Guild | Document TaskRunner APIs (pack runs, logs, approvals) with streaming schemas/examples. |
-| 7 | TASKRUN-OAS-61-002 | TODO | Depends on 61-001; ready once OAS documented. | Task Runner Guild | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, ETag. |
+| 5 | TASKRUN-42-001 | DONE (2025-12-06) | Implemented Loop/Conditional step kinds, extended execution graph/simulation engine, added manifest/planner/validator support, 128 tests passing. | Task Runner Guild (`src/TaskRunner/StellaOps.TaskRunner`) | Execution engine enhancements + simulation API/CLI. |
+| 6 | TASKRUN-OAS-61-001 | DONE (2025-12-06) | Created `docs/api/taskrunner-openapi.yaml` with full API documentation including streaming logs (NDJSON), loop/conditional/policy gate schemas. | Task Runner Guild · API Contracts Guild | Document TaskRunner APIs (pack runs, logs, approvals) with streaming schemas/examples. |
+| 7 | TASKRUN-OAS-61-002 | TODO | ✅ 61-001 DONE; endpoint already implemented in Program.cs; needs signing integration. | Task Runner Guild | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, ETag. |
| 8 | TASKRUN-OAS-62-001 | TODO | Depends on 61-002. | Task Runner Guild · SDK Generator Guild | SDK examples for pack run lifecycle; streaming log helpers; paginator wrappers. |
| 9 | TASKRUN-OAS-63-001 | TODO | Depends on 62-001. | Task Runner Guild · API Governance Guild | Sunset/deprecation headers + notifications for legacy pack APIs. |
| 10 | TASKRUN-OBS-50-001 | DONE (2025-11-25) | Telemetry core adoption. | Task Runner Guild | Add telemetry core in host + worker; spans/logs include `trace_id`, `tenant_id`, `run_id`, scrubbed transcripts. |
@@ -56,6 +56,8 @@
## Execution Log
| Date (UTC) | Update | Owner |
| --- | --- | --- |
| 2025-12-06 | TASKRUN-OAS-61-001 DONE: Created `docs/api/taskrunner-openapi.yaml` OpenAPI 3.1 specification documenting all TaskRunner WebService APIs: POST /v1/task-runner/simulations (simulate task pack), POST /v1/task-runner/runs (create run), GET /v1/task-runner/runs/{runId} (get state), GET /v1/task-runner/runs/{runId}/logs (NDJSON streaming), GET /v1/task-runner/runs/{runId}/artifacts (list artifacts), POST /v1/task-runner/runs/{runId}/approvals/{approvalId} (apply decision), POST /v1/task-runner/runs/{runId}/cancel (cancel run), GET /.well-known/openapi (metadata). Includes LoopInfo, ConditionalInfo, PolicyInfo schemas for new control-flow steps. Examples provided for all endpoints. | Implementer |
| 2025-12-06 | TASKRUN-42-001 DONE: Extended `PackRunStepKind` enum with `Loop` and `Conditional`. Added `PackRunLoopConfig`, `PackRunConditionalConfig`, `PackRunPolicyGateConfig` record types to `PackRunExecutionGraph.cs`. Updated `PackRunExecutionGraphBuilder` to extract loop/conditional/policy gate configs. Extended `PackRunSimulationEngine` and `PackRunSimulationModels.cs` with `WillIterate`/`WillBranch` statuses and simulation info records. Added `TaskPackLoopStep`, `TaskPackConditionalStep` manifest models. Updated `TaskPackPlanner` with `BuildLoopStep`/`BuildConditionalStep` methods. Updated `TaskPackManifestValidator` for loop/conditional validation. Added 3 new simulation tests (loop, conditional, policy gate); 128 total tests passing. | Implementer |
| 2025-12-06 | TASKRUN-OBS-53-001 DONE: Created `PackRunEvidenceSnapshot.cs` domain model with Merkle root computation for hash chain integrity. Created `IPackRunEvidenceSnapshotService.cs` with service for capturing run completion, step execution, approval decisions, and policy evaluations. Created `IPackRunEvidenceStore.cs` with InMemoryPackRunEvidenceStore for testing. Created `IPackRunRedactionGuard.cs` with PackRunRedactionGuard for sensitive data redaction (bearer tokens, passwords, emails, identities). Added 29 comprehensive tests in `PackRunEvidenceSnapshotTests.cs`. Build verified (0 errors), all tests passing. | Implementer |
| 2025-12-06 | TASKRUN-OBS-52-001 DONE: Created `PackRunTimelineEvent.cs` domain model per timeline-event.schema.json with event types (pack.started, pack.step.completed, pack.failed, etc.). Created `PackRunTimelineEventEmitter.cs` with retry logic and deterministic batch ordering. Created `IPackRunTimelineEventSink.cs` with InMemoryPackRunTimelineEventSink for testing. Added 32 comprehensive tests in `PackRunTimelineEventTests.cs`. Build verified (0 errors), all tests passing. | Implementer |
| 2025-12-05 | **OBS Unblocked:** TASKRUN-OBS-52-001 and TASKRUN-OBS-53-001 changed from BLOCKED to TODO. Root blocker resolved: `timeline-event.schema.json` created 2025-12-04 per BLOCKED_DEPENDENCY_TREE.md Section 8.3. | Implementer |

View File

@@ -0,0 +1,282 @@
# Bun Analyzer Design — PREP-SCANNER-BUN-001-DESIGN-DOC
Status: Draft (2025-12-06)
Owners: Bun Analyzer Guild · Scanner Guild
Scope: Bun package manager analyzer for npm-ecosystem vulnerability scanning in container filesystems.
## Overview
The Bun analyzer extracts npm-ecosystem package inventory from Bun-managed JavaScript/TypeScript projects. Bun consumes npm-compatible `package.json` manifests and installs packages into `node_modules`, making it similar to the Node analyzer but with distinct lockfile formats and installation structures.
## Supported Artifacts
### Lockfile Formats
| Format | Extension | Status | Notes |
|--------|-----------|--------|-------|
| Text lockfile | `bun.lock` | Supported | JSONC format with package metadata |
| Binary lockfile | `bun.lockb` | Unsupported | Undocumented binary format; emit migration guidance |
### Installation Structures
| Structure | Discovery Pattern | Notes |
|-----------|-------------------|-------|
| Default (hoisted) | `node_modules/**/package.json` | Standard flat structure |
| Isolated linker | `node_modules/.bun/**/package.json` | Symlink-heavy, requires safe traversal |
## Discovery Heuristics
### Project Root Detection
A directory is considered a Bun project root when (see the sketch after this list):
1. `package.json` exists, AND
2. One or more of:
- `bun.lock` exists (text lockfile)
- `bun.lockb` exists (binary lockfile — triggers unsupported message)
- `bunfig.toml` exists (Bun configuration)
- `node_modules/.bun/` exists (isolated linker marker)
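A minimal predicate sketch of the rules above (helper name illustrative; the actual `BunProjectDiscoverer` logic may differ):
```csharp
using System.IO;

// Hedged sketch: a directory qualifies as a Bun project root when
// package.json exists alongside at least one Bun marker from the list above.
static bool IsBunProjectRoot(string dir) =>
    File.Exists(Path.Combine(dir, "package.json")) &&
    (File.Exists(Path.Combine(dir, "bun.lock")) ||
     File.Exists(Path.Combine(dir, "bun.lockb")) ||
     File.Exists(Path.Combine(dir, "bunfig.toml")) ||
     Directory.Exists(Path.Combine(dir, "node_modules", ".bun")));
```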
### Input Classification
```
BunInputNormalizer classifies each root:
├── InstalledPath: node_modules exists → traverse installed packages
├── LockfilePath: bun.lock only (no node_modules) → parse lockfile
└── Unsupported: bun.lockb only → emit remediation finding
```
## Lockfile Schema (`bun.lock`)
The `bun.lock` text format is a JSONC variant (JSON with comments and trailing commas):
```jsonc
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "my-app",
"dependencies": {
"lodash": "^4.17.21"
}
}
},
"packages": {
"lodash@4.17.21": {
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57...",
"dependencies": {}
}
}
}
```
### Extracted Fields
| Field | Source | Usage |
|-------|--------|-------|
| name | packages key (before `@version`) | PURL name component |
| version | packages key (after `@`) | PURL version component |
| resolved | packages[].resolved | Evidence locator |
| integrity | packages[].integrity | Evidence hash (sha512/sha256) |
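Because the packages key embeds both name and version, and scoped names themselves begin with `@`, the split must occur at the *last* `@`. A minimal parsing sketch (helper name illustrative):
```csharp
using System;

// Hedged sketch: split a bun.lock packages key such as "@types/node@20.10.0"
// into (name, version). Splitting at the LAST '@' keeps the scope intact.
static (string Name, string Version) ParsePackageKey(string key)
{
    var at = key.LastIndexOf('@');
    if (at <= 0) // '@' at index 0 would be a scope with no version suffix
        throw new FormatException($"Unexpected package key: {key}");
    return (key[..at], key[(at + 1)..]);
}

// ParsePackageKey("lodash@4.17.21")      -> ("lodash", "4.17.21")
// ParsePackageKey("@types/node@20.10.0") -> ("@types/node", "20.10.0")
```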
## Evidence Model
### LanguageComponentEvidence Structure
```csharp
record BunPackageEvidence(
LanguageEvidenceKind Kind, // File | Metadata | Lockfile
string Source, // "node_modules" | "bun.lock"
string Locator, // File path or registry URL
string? Content, // package.json content (if File)
string? Sha256); // Content hash
```
### Evidence Collection Matrix
| Source | Kind | Locator | Content | Hash |
|--------|------|---------|---------|------|
| `node_modules/**/package.json` | File | Relative path | JSON content | sha256 of content |
| `bun.lock` | Lockfile | `bun.lock:packages[name@version]` | null | null |
| Registry resolution | Metadata | resolved URL | null | integrity value |
## PURL Generation
Bun packages are npm packages carrying a `bun` package-manager qualifier:
```
pkg:npm/<name>@<version>?package_manager=bun
```
### Scoped Package Encoding
| Raw Name | Encoded PURL |
|----------|--------------|
| `lodash` | `pkg:npm/lodash@4.17.21?package_manager=bun` |
| `@types/node` | `pkg:npm/%40types/node@20.10.0?package_manager=bun` |
| `@org/pkg` | `pkg:npm/%40org/pkg@1.0.0?package_manager=bun` |
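A minimal generation sketch matching the table above; only the scope's leading `@` is percent-encoded (`%40`), while the `/` separator stays literal:
```csharp
// Hedged sketch: build the Bun-qualified npm PURL from a parsed name/version.
static string ToPurl(string name, string version)
{
    var encoded = name.StartsWith('@') ? "%40" + name[1..] : name;
    return $"pkg:npm/{encoded}@{version}?package_manager=bun";
}

// ToPurl("@types/node", "20.10.0")
//   -> "pkg:npm/%40types/node@20.10.0?package_manager=bun"
```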
## Symlink Safety
Bun's isolated linker creates symlink-heavy structures. Safety requirements:
1. **Prefix Containment**: Only follow symlinks that resolve within root path
2. **Cycle Detection**: Maintain visited inode/realpath set
3. **Path Recording**: Record both logical path (symlink) and real path (target)
4. **Depth Limit**: Cap symlink depth at 32 levels (configurable)
```csharp
record SymlinkSafetyContext(
string RootPrefix,
HashSet<(long Inode, long Device)> VisitedInodes,
int MaxDepth = 32);
```
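A traversal sketch under these rules, assuming .NET 6+ (`ResolveLinkTarget`). It keys cycle detection on resolved real paths rather than the inode/device pair shown above, and its prefix check is simplified (no separator-boundary guard):
```csharp
using System;
using System.Collections.Generic;
using System.IO;

// Hedged sketch: depth-capped, cycle-safe descent that only follows symlinks
// resolving inside the root prefix.
static IEnumerable<string> SafeEnumerateDirectories(string root, int maxDepth = 32)
{
    var rootFull = Path.TrimEndingDirectorySeparator(Path.GetFullPath(root));
    var visited = new HashSet<string>(StringComparer.Ordinal); // real paths seen
    var pending = new Stack<(string Dir, int Depth)>();
    pending.Push((rootFull, 0));

    while (pending.Count > 0)
    {
        var (dir, depth) = pending.Pop();
        if (depth > maxDepth)
            continue; // depth cap: malformed or adversarial symlink chains

        var info = new DirectoryInfo(dir);
        var real = info.LinkTarget is null
            ? info.FullName
            : info.ResolveLinkTarget(returnFinalTarget: true)?.FullName;

        // Prefix containment: skip symlinks escaping the root.
        if (real is null || !real.StartsWith(rootFull, StringComparison.Ordinal))
            continue;

        if (!visited.Add(real))
            continue; // cycle detected via previously visited real path

        yield return dir; // logical path; 'real' holds the resolved target
        foreach (var child in Directory.EnumerateDirectories(dir))
            pending.Push((child, depth + 1));
    }
}
```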
## Performance Guards
| Guard | Default | Rationale |
|-------|---------|-----------|
| max-files-per-root | 50,000 | Prevent full image traversal |
| max-symlink-depth | 32 | Avoid infinite loops in malformed structures |
| prefix-pruning | enabled | Skip paths outside expected locations |
| timeout-per-root | 60s | Bound analysis time per project |
## CLI Contract
### `stellaops-cli bun inspect`
Displays the Bun package inventory for a local project root or a completed scan ID:
```bash
# Local analysis
stellaops-cli bun inspect /path/to/project
# Remote scan lookup
stellaops-cli bun inspect --scan-id abc123
```
Output formats: `--output json|table|ndjson`
### `stellaops-cli bun resolve`
Resolve Bun packages by scan ID, digest, or image reference:
```bash
stellaops-cli bun resolve --scan-id abc123 --package lodash
stellaops-cli bun resolve --digest sha256:abc... --format json
```
## WebService Contract
### `GET /api/scans/{scanId}/bun-packages`
Returns the inventory of Bun packages for a completed scan.
Query parameters:
- `page`, `pageSize`: Pagination
- `name`: Filter by package name (prefix match)
- `scope`: Filter by npm scope
Response schema:
```json
{
"scanId": "abc123",
"analyzer": "bun",
"packages": [
{
"name": "lodash",
"version": "4.17.21",
"purl": "pkg:npm/lodash@4.17.21?package_manager=bun",
"evidence": [
{
"kind": "File",
"source": "node_modules",
"locator": "node_modules/lodash/package.json",
"sha256": "abc..."
}
]
}
],
"total": 150,
"page": 1,
"pageSize": 50
}
```
## Unsupported Artifact Handling
### Binary Lockfile (`bun.lockb`)
When only `bun.lockb` is present (no `bun.lock` or `node_modules`):
1. Emit remediation finding with severity `Info`
2. Provide migration command: `bun install --save-text-lockfile`
3. Skip package enumeration (no false negatives from partial binary parse)
```csharp
record BunLockbUnsupportedFinding(
string Path,
string RemediationCommand = "bun install --save-text-lockfile",
string Reason = "Binary lockfile format is undocumented and unstable");
```
## Test Fixtures
| Fixture | Purpose | Validation |
|---------|---------|------------|
| `hoisted-install` | Standard Bun install with `node_modules` + `bun.lock` | Installed inventory path |
| `isolated-linker` | `bun install --linker isolated` structure | `.bun/` traversal |
| `lockfile-only` | No `node_modules`, only `bun.lock` | Lockfile inventory, dev/prod filtering |
| `binary-lockfile-only` | Only `bun.lockb` present | Unsupported remediation message |
| `monorepo-workspaces` | Multiple `package.json` under single lock | Workspace member handling |
| `symlink-cycles` | Malformed structure with cycles | Cycle detection, no infinite loops |
## Configuration
### Environment Variables
| Variable | Default | Description |
|----------|---------|-------------|
| `STELLAOPS_BUN_MAX_FILES` | 50000 | Max files per root |
| `STELLAOPS_BUN_MAX_SYMLINK_DEPTH` | 32 | Max symlink traversal depth |
| `STELLAOPS_BUN_INCLUDE_DEV` | true | Include dev dependencies |
| `STELLAOPS_BUN_TIMEOUT_SECONDS` | 60 | Per-root analysis timeout |
### appsettings.json
```json
{
{
"Scanner": {
"Analyzers": {
"Bun": {
"MaxFilesPerRoot": 50000,
"MaxSymlinkDepth": 32,
"IncludeDevDependencies": true,
"TimeoutSeconds": 60
}
}
}
}
```
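A hedged binding sketch for the section above, assuming a hypothetical `BunAnalyzerOptions` POCO mirroring the four keys (the analyzer's real options type may differ):
```csharp
// Hypothetical options type; defaults mirror the table above.
public sealed class BunAnalyzerOptions
{
    public int MaxFilesPerRoot { get; set; } = 50_000;
    public int MaxSymlinkDepth { get; set; } = 32;
    public bool IncludeDevDependencies { get; set; } = true;
    public int TimeoutSeconds { get; set; } = 60;
}

// Registration, e.g. in the scanner host's composition root:
// services.Configure<BunAnalyzerOptions>(
//     configuration.GetSection("Scanner:Analyzers:Bun"));
```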
## Determinism Requirements
1. **Sorted Output**: Packages ordered by `(name, version)` tuple
2. **Stable IDs**: Component keys computed as `sha256(analyzerId + purl)` (see the sketch after this list)
3. **Reproducible Evidence**: Evidence ordered by `(kind, source, locator)`
4. **No Timestamps**: Evidence does not include file modification times
5. **Canonical Paths**: All paths normalized (forward slashes, no trailing slash)
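A minimal sketch of requirement 2 (stable IDs), assuming UTF-8 encoding of the concatenated analyzer id and PURL:
```csharp
using System;
using System.Security.Cryptography;
using System.Text;

// Hedged sketch: component key = lowercase hex sha256(analyzerId + purl).
static string ComponentKey(string analyzerId, string purl)
{
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes(analyzerId + purl));
    return Convert.ToHexString(hash).ToLowerInvariant();
}

// ComponentKey("bun", "pkg:npm/lodash@4.17.21?package_manager=bun")
```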
## Open Decisions
1. **Dev Dependency Default**: Currently `include_dev: true` for lockfile-only scans — confirm with Policy Guild
2. **Workspace Handling**: Whether to emit separate inventory per workspace or merged — await monorepo fixture results
3. **PURL Qualifier**: Using `?package_manager=bun` vs no qualifier — coordinate with Concelier linkset resolution
## Handoff
This document serves as the PREP artifact for PREP-SCANNER-BUN-001-DESIGN-DOC. Update upon:
- Policy Guild confirmation of dev dependency defaults
- Concelier Guild decision on PURL qualifier handling
- Fixture suite completion revealing edge cases

View File

@@ -0,0 +1,502 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/export-profiles.schema.json",
"title": "StellaOps Export Profiles Schema",
"description": "Schema for CLI export profiles, scheduling, and distribution configuration. Unblocks CLI-EXPORT-35-001.",
"type": "object",
"definitions": {
"ExportProfile": {
"type": "object",
"required": ["profile_id", "name", "format", "created_at"],
"properties": {
"profile_id": {
"type": "string",
"format": "uuid",
"description": "Unique identifier for the export profile"
},
"name": {
"type": "string",
"minLength": 1,
"maxLength": 128,
"description": "Human-readable profile name"
},
"description": {
"type": "string",
"maxLength": 512
},
"format": {
"$ref": "#/definitions/ExportFormat"
},
"filters": {
"$ref": "#/definitions/ExportFilters"
},
"schedule": {
"$ref": "#/definitions/ExportSchedule"
},
"distribution": {
"$ref": "#/definitions/Distribution"
},
"retention": {
"$ref": "#/definitions/RetentionPolicy"
},
"signing": {
"$ref": "#/definitions/SigningConfig"
},
"metadata": {
"type": "object",
"additionalProperties": true
},
"enabled": {
"type": "boolean",
"default": true
},
"tenant_id": {
"type": "string",
"format": "uuid"
},
"created_at": {
"type": "string",
"format": "date-time"
},
"updated_at": {
"type": "string",
"format": "date-time"
},
"created_by": {
"type": "string"
}
}
},
"ExportFormat": {
"type": "object",
"required": ["type"],
"properties": {
"type": {
"type": "string",
"enum": ["sbom", "vex", "attestation", "evidence", "risk-report", "compliance-report", "airgap-bundle"]
},
"variant": {
"type": "string",
"enum": ["cyclonedx-1.6", "spdx-3.0.1", "openvex", "csaf-vex", "in-toto", "dsse", "json", "csv", "pdf"],
"description": "Format variant for the export type"
},
"options": {
"type": "object",
"properties": {
"include_signatures": {
"type": "boolean",
"default": true
},
"include_provenance": {
"type": "boolean",
"default": false
},
"include_rekor_receipts": {
"type": "boolean",
"default": false
},
"compress": {
"type": "boolean",
"default": true
},
"compression_algorithm": {
"type": "string",
"enum": ["gzip", "zstd", "none"],
"default": "gzip"
}
}
}
}
},
"ExportFilters": {
"type": "object",
"description": "Filters to apply when selecting data for export",
"properties": {
"date_range": {
"type": "object",
"properties": {
"from": {
"type": "string",
"format": "date-time"
},
"to": {
"type": "string",
"format": "date-time"
},
"relative": {
"type": "string",
"pattern": "^-?[0-9]+[hdwmy]$",
"description": "Relative time range (e.g., -7d for last 7 days)"
}
}
},
"severity": {
"type": "array",
"items": {
"type": "string",
"enum": ["critical", "high", "medium", "low", "info", "unknown"]
}
},
"vex_status": {
"type": "array",
"items": {
"type": "string",
"enum": ["affected", "not_affected", "fixed", "under_investigation"]
}
},
"components": {
"type": "array",
"items": {
"type": "string"
},
"description": "PURL patterns to include"
},
"exclude_components": {
"type": "array",
"items": {
"type": "string"
},
"description": "PURL patterns to exclude"
},
"cve_ids": {
"type": "array",
"items": {
"type": "string",
"pattern": "^CVE-[0-9]{4}-[0-9]+$"
}
},
"tags": {
"type": "array",
"items": {
"type": "string"
}
},
"environments": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"ExportSchedule": {
"type": "object",
"description": "Schedule for automated exports",
"properties": {
"enabled": {
"type": "boolean",
"default": false
},
"cron": {
"type": "string",
"pattern": "^(@(annually|yearly|monthly|weekly|daily|hourly))|((\\*|[0-9,\\-\\/]+)\\s+){4,5}(\\*|[0-9,\\-\\/]+)$",
"description": "Cron expression for scheduling (5 or 6 fields)"
},
"timezone": {
"type": "string",
"default": "UTC",
"description": "IANA timezone identifier"
},
"next_run": {
"type": "string",
"format": "date-time",
"readOnly": true
},
"last_run": {
"type": "string",
"format": "date-time",
"readOnly": true
},
"last_status": {
"type": "string",
"enum": ["success", "partial", "failed", "pending"],
"readOnly": true
}
}
},
"Distribution": {
"type": "object",
"description": "Distribution targets for exports",
"properties": {
"targets": {
"type": "array",
"items": {
"$ref": "#/definitions/DistributionTarget"
}
},
"notify_on_completion": {
"type": "boolean",
"default": true
},
"notify_on_failure": {
"type": "boolean",
"default": true
}
}
},
"DistributionTarget": {
"type": "object",
"required": ["type"],
"properties": {
"type": {
"type": "string",
"enum": ["s3", "azure-blob", "gcs", "sftp", "webhook", "email", "local"]
},
"name": {
"type": "string"
},
"enabled": {
"type": "boolean",
"default": true
},
"config": {
"type": "object",
"description": "Target-specific configuration",
"additionalProperties": true
}
},
"allOf": [
{
"if": {
"properties": { "type": { "const": "s3" } }
},
"then": {
"properties": {
"config": {
"type": "object",
"required": ["bucket", "region"],
"properties": {
"bucket": { "type": "string" },
"region": { "type": "string" },
"prefix": { "type": "string" },
"credentials_secret": { "type": "string" }
}
}
}
}
},
{
"if": {
"properties": { "type": { "const": "webhook" } }
},
"then": {
"properties": {
"config": {
"type": "object",
"required": ["url"],
"properties": {
"url": { "type": "string", "format": "uri" },
"method": { "type": "string", "enum": ["POST", "PUT"], "default": "POST" },
"headers": { "type": "object", "additionalProperties": { "type": "string" } },
"auth_secret": { "type": "string" }
}
}
}
}
}
]
},
"RetentionPolicy": {
"type": "object",
"description": "Retention policy for exported artifacts",
"properties": {
"max_age_days": {
"type": "integer",
"minimum": 1,
"maximum": 3650,
"default": 365
},
"max_count": {
"type": "integer",
"minimum": 1,
"description": "Maximum number of exports to retain"
},
"delete_on_success": {
"type": "boolean",
"default": false,
"description": "Delete source data after successful export"
}
}
},
"SigningConfig": {
"type": "object",
"description": "Signing configuration for exports",
"properties": {
"enabled": {
"type": "boolean",
"default": true
},
"key_id": {
"type": "string",
"description": "Key identifier for signing"
},
"algorithm": {
"type": "string",
"enum": ["ES256", "RS256", "EdDSA"],
"default": "ES256"
},
"include_rekor": {
"type": "boolean",
"default": false,
"description": "Include Rekor transparency log receipt"
},
"timestamp_authority": {
"type": "string",
"format": "uri",
"description": "RFC 3161 timestamp authority URL"
}
}
},
"ExportJob": {
"type": "object",
"description": "Export job status",
"required": ["job_id", "profile_id", "status", "created_at"],
"properties": {
"job_id": {
"type": "string",
"format": "uuid"
},
"profile_id": {
"type": "string",
"format": "uuid"
},
"status": {
"type": "string",
"enum": ["pending", "running", "success", "partial", "failed", "cancelled"]
},
"progress": {
"type": "object",
"properties": {
"percent": {
"type": "integer",
"minimum": 0,
"maximum": 100
},
"items_processed": {
"type": "integer"
},
"items_total": {
"type": "integer"
}
}
},
"artifacts": {
"type": "array",
"items": {
"$ref": "#/definitions/ExportArtifact"
}
},
"errors": {
"type": "array",
"items": {
"type": "string"
}
},
"created_at": {
"type": "string",
"format": "date-time"
},
"started_at": {
"type": "string",
"format": "date-time"
},
"completed_at": {
"type": "string",
"format": "date-time"
}
}
},
"ExportArtifact": {
"type": "object",
"required": ["artifact_id", "digest", "size"],
"properties": {
"artifact_id": {
"type": "string",
"format": "uuid"
},
"filename": {
"type": "string"
},
"digest": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"size": {
"type": "integer",
"description": "Size in bytes"
},
"format": {
"type": "string"
},
"signature": {
"type": "string",
"description": "Base64-encoded signature"
},
"download_url": {
"type": "string",
"format": "uri"
},
"expires_at": {
"type": "string",
"format": "date-time"
}
}
}
},
"properties": {
"profiles": {
"type": "array",
"items": {
"$ref": "#/definitions/ExportProfile"
}
}
},
"examples": [
{
"profiles": [
{
"profile_id": "550e8400-e29b-41d4-a716-446655440001",
"name": "Weekly SBOM Export",
"description": "Export all SBOMs in CycloneDX format weekly",
"format": {
"type": "sbom",
"variant": "cyclonedx-1.6",
"options": {
"include_signatures": true,
"compress": true
}
},
"filters": {
"date_range": {
"relative": "-7d"
}
},
"schedule": {
"enabled": true,
"cron": "0 2 * * 0",
"timezone": "UTC"
},
"distribution": {
"targets": [
{
"type": "s3",
"name": "compliance-bucket",
"config": {
"bucket": "company-compliance-exports",
"region": "us-east-1",
"prefix": "sboms/"
}
}
]
},
"retention": {
"max_age_days": 365,
"max_count": 52
},
"enabled": true,
"created_at": "2025-12-01T00:00:00Z"
}
]
}
]
}

View File

@@ -0,0 +1,605 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/notify-rules.schema.json",
"title": "StellaOps Notification Rules Schema",
"description": "Schema for notification rules, webhook payloads, and digest formats. Unblocks CLI-NOTIFY-38-001.",
"type": "object",
"definitions": {
"NotifyRule": {
"type": "object",
"required": ["rule_id", "name", "event_types", "channels", "created_at"],
"properties": {
"rule_id": {
"type": "string",
"format": "uuid",
"description": "Unique identifier for the notification rule"
},
"name": {
"type": "string",
"minLength": 1,
"maxLength": 128,
"description": "Human-readable rule name"
},
"description": {
"type": "string",
"maxLength": 512
},
"event_types": {
"type": "array",
"minItems": 1,
"items": {
"$ref": "#/definitions/EventType"
},
"description": "Event types that trigger this rule"
},
"filters": {
"$ref": "#/definitions/NotifyFilters"
},
"channels": {
"type": "array",
"minItems": 1,
"items": {
"$ref": "#/definitions/NotifyChannel"
}
},
"throttle": {
"$ref": "#/definitions/ThrottleConfig"
},
"digest": {
"$ref": "#/definitions/DigestConfig"
},
"templates": {
"$ref": "#/definitions/NotifyTemplates"
},
"enabled": {
"type": "boolean",
"default": true
},
"priority": {
"type": "integer",
"minimum": 0,
"maximum": 100,
"default": 50,
"description": "Rule priority (higher = processed first)"
},
"tenant_id": {
"type": "string",
"format": "uuid"
},
"created_at": {
"type": "string",
"format": "date-time"
},
"updated_at": {
"type": "string",
"format": "date-time"
},
"created_by": {
"type": "string"
}
}
},
"EventType": {
"type": "string",
"enum": [
"vulnerability.new",
"vulnerability.updated",
"vulnerability.resolved",
"vulnerability.critical",
"vex.status_changed",
"vex.consensus_changed",
"policy.violation",
"policy.override_requested",
"policy.override_approved",
"policy.override_expired",
"scan.completed",
"scan.failed",
"attestation.created",
"attestation.verification_failed",
"airgap.staleness_warning",
"airgap.staleness_critical",
"airgap.bundle_imported",
"export.completed",
"export.failed",
"system.health_degraded",
"system.error"
]
},
"NotifyFilters": {
"type": "object",
"description": "Filters to apply before triggering notification",
"properties": {
"severity": {
"type": "array",
"items": {
"type": "string",
"enum": ["critical", "high", "medium", "low", "info"]
},
"description": "Only trigger for these severities"
},
"cvss_minimum": {
"type": "number",
"minimum": 0,
"maximum": 10,
"description": "Minimum CVSS score to trigger"
},
"components": {
"type": "array",
"items": {
"type": "string"
},
"description": "PURL patterns to match"
},
"environments": {
"type": "array",
"items": {
"type": "string"
}
},
"tags": {
"type": "array",
"items": {
"type": "string"
}
},
"kev_only": {
"type": "boolean",
"default": false,
"description": "Only trigger for Known Exploited Vulnerabilities"
},
"fix_available": {
"type": "boolean",
"description": "Filter by fix availability"
}
}
},
"NotifyChannel": {
"type": "object",
"required": ["type"],
"properties": {
"type": {
"type": "string",
"enum": ["email", "slack", "teams", "webhook", "pagerduty", "opsgenie", "sns"]
},
"name": {
"type": "string"
},
"enabled": {
"type": "boolean",
"default": true
},
"config": {
"type": "object",
"additionalProperties": true
}
},
"allOf": [
{
"if": { "properties": { "type": { "const": "email" } } },
"then": {
"properties": {
"config": {
"type": "object",
"required": ["recipients"],
"properties": {
"recipients": {
"type": "array",
"items": { "type": "string", "format": "email" }
},
"cc": {
"type": "array",
"items": { "type": "string", "format": "email" }
},
"subject_prefix": { "type": "string" }
}
}
}
}
},
{
"if": { "properties": { "type": { "const": "slack" } } },
"then": {
"properties": {
"config": {
"type": "object",
"required": ["webhook_url"],
"properties": {
"webhook_url": { "type": "string", "format": "uri" },
"channel": { "type": "string" },
"username": { "type": "string" },
"icon_emoji": { "type": "string" }
}
}
}
}
},
{
"if": { "properties": { "type": { "const": "teams" } } },
"then": {
"properties": {
"config": {
"type": "object",
"required": ["webhook_url"],
"properties": {
"webhook_url": { "type": "string", "format": "uri" }
}
}
}
}
},
{
"if": { "properties": { "type": { "const": "webhook" } } },
"then": {
"properties": {
"config": {
"type": "object",
"required": ["url"],
"properties": {
"url": { "type": "string", "format": "uri" },
"method": { "type": "string", "enum": ["POST", "PUT"], "default": "POST" },
"headers": { "type": "object", "additionalProperties": { "type": "string" } },
"auth_type": { "type": "string", "enum": ["none", "basic", "bearer", "hmac"] },
"auth_secret": { "type": "string" },
"retry_count": { "type": "integer", "minimum": 0, "maximum": 5, "default": 3 },
"timeout_seconds": { "type": "integer", "minimum": 1, "maximum": 60, "default": 30 }
}
}
}
}
},
{
"if": { "properties": { "type": { "const": "pagerduty" } } },
"then": {
"properties": {
"config": {
"type": "object",
"required": ["routing_key"],
"properties": {
"routing_key": { "type": "string" },
"severity_mapping": {
"type": "object",
"additionalProperties": { "type": "string", "enum": ["critical", "error", "warning", "info"] }
}
}
}
}
}
}
]
},
"ThrottleConfig": {
"type": "object",
"description": "Throttling configuration to prevent notification storms",
"properties": {
"enabled": {
"type": "boolean",
"default": true
},
"max_per_hour": {
"type": "integer",
"minimum": 1,
"default": 100
},
"max_per_day": {
"type": "integer",
"minimum": 1,
"default": 1000
},
"dedupe_window_seconds": {
"type": "integer",
"minimum": 0,
"default": 300,
"description": "Window for deduplicating identical notifications"
},
"dedupe_key_fields": {
"type": "array",
"items": { "type": "string" },
"default": ["event_type", "cve_id", "purl"],
"description": "Fields to use for deduplication key"
}
}
},
"DigestConfig": {
"type": "object",
"description": "Configuration for digest/summary notifications",
"properties": {
"enabled": {
"type": "boolean",
"default": false
},
"frequency": {
"type": "string",
"enum": ["hourly", "daily", "weekly"],
"default": "daily"
},
"schedule": {
"type": "string",
"description": "Cron expression for digest delivery"
},
"timezone": {
"type": "string",
"default": "UTC"
},
"min_events": {
"type": "integer",
"minimum": 1,
"default": 1,
"description": "Minimum events required to send digest"
},
"group_by": {
"type": "array",
"items": {
"type": "string",
"enum": ["severity", "event_type", "component", "environment"]
},
"description": "Fields to group events by in digest"
},
"include_summary": {
"type": "boolean",
"default": true
},
"include_details": {
"type": "boolean",
"default": false,
"description": "Include full event details in digest"
}
}
},
"NotifyTemplates": {
"type": "object",
"description": "Custom notification templates",
"properties": {
"subject": {
"type": "string",
"description": "Template for notification subject (supports {{variables}})"
},
"body": {
"type": "string",
"description": "Template for notification body"
},
"body_html": {
"type": "string",
"description": "HTML template for email body"
}
}
},
"WebhookPayload": {
"type": "object",
"description": "Standard webhook payload format",
"required": ["id", "timestamp", "event_type", "data"],
"properties": {
"id": {
"type": "string",
"format": "uuid",
"description": "Unique notification ID"
},
"timestamp": {
"type": "string",
"format": "date-time"
},
"event_type": {
"$ref": "#/definitions/EventType"
},
"version": {
"type": "string",
"default": "1.0.0"
},
"tenant_id": {
"type": "string",
"format": "uuid"
},
"data": {
"type": "object",
"description": "Event-specific payload data",
"additionalProperties": true
},
"metadata": {
"type": "object",
"properties": {
"rule_id": { "type": "string", "format": "uuid" },
"rule_name": { "type": "string" },
"retry_count": { "type": "integer" },
"digest_id": { "type": "string", "format": "uuid" }
}
}
}
},
"DigestPayload": {
"type": "object",
"description": "Digest/summary notification payload",
"required": ["id", "timestamp", "period", "summary"],
"properties": {
"id": {
"type": "string",
"format": "uuid"
},
"timestamp": {
"type": "string",
"format": "date-time"
},
"period": {
"type": "object",
"required": ["start", "end"],
"properties": {
"start": { "type": "string", "format": "date-time" },
"end": { "type": "string", "format": "date-time" }
}
},
"summary": {
"type": "object",
"properties": {
"total_events": { "type": "integer" },
"by_severity": {
"type": "object",
"additionalProperties": { "type": "integer" }
},
"by_event_type": {
"type": "object",
"additionalProperties": { "type": "integer" }
},
"new_vulnerabilities": { "type": "integer" },
"resolved_vulnerabilities": { "type": "integer" },
"policy_violations": { "type": "integer" }
}
},
"events": {
"type": "array",
"items": {
"$ref": "#/definitions/WebhookPayload"
},
"description": "Optional detailed event list"
},
"groups": {
"type": "array",
"items": {
"type": "object",
"properties": {
"key": { "type": "string" },
"count": { "type": "integer" },
"sample_events": {
"type": "array",
"items": { "$ref": "#/definitions/WebhookPayload" }
}
}
}
}
}
},
"NotifySimulationRequest": {
"type": "object",
"description": "Request to simulate a notification rule",
"required": ["event"],
"properties": {
"rule_id": {
"type": "string",
"format": "uuid",
"description": "Rule to simulate (optional, uses all matching if not specified)"
},
"event": {
"$ref": "#/definitions/WebhookPayload"
},
"dry_run": {
"type": "boolean",
"default": true,
"description": "If true, don't actually send notifications"
}
}
},
"NotifySimulationResult": {
"type": "object",
"required": ["matched_rules", "would_notify"],
"properties": {
"matched_rules": {
"type": "array",
"items": {
"type": "object",
"properties": {
"rule_id": { "type": "string", "format": "uuid" },
"rule_name": { "type": "string" },
"matched": { "type": "boolean" },
"reason": { "type": "string" }
}
}
},
"would_notify": {
"type": "array",
"items": {
"type": "object",
"properties": {
"channel_type": { "type": "string" },
"channel_name": { "type": "string" },
"payload_preview": { "type": "object" }
}
}
},
"throttled": {
"type": "boolean"
},
"throttle_reason": {
"type": "string"
}
}
},
"NotifyAckToken": {
"type": "object",
"description": "Acknowledgement token for notifications",
"required": ["token", "notification_id", "expires_at"],
"properties": {
"token": {
"type": "string",
"description": "Opaque acknowledgement token"
},
"notification_id": {
"type": "string",
"format": "uuid"
},
"event_type": {
"$ref": "#/definitions/EventType"
},
"expires_at": {
"type": "string",
"format": "date-time"
},
"ack_url": {
"type": "string",
"format": "uri",
"description": "URL to acknowledge the notification"
}
}
}
},
"properties": {
"rules": {
"type": "array",
"items": {
"$ref": "#/definitions/NotifyRule"
}
}
},
"examples": [
{
"rules": [
{
"rule_id": "550e8400-e29b-41d4-a716-446655440002",
"name": "Critical Vulnerability Alert",
"description": "Immediate notification for critical vulnerabilities",
"event_types": ["vulnerability.critical", "vulnerability.new"],
"filters": {
"severity": ["critical"],
"kev_only": false
},
"channels": [
{
"type": "slack",
"name": "security-alerts",
"config": {
"webhook_url": "https://hooks.slack.com/services/xxx",
"channel": "#security-alerts",
"icon_emoji": ":warning:"
}
},
{
"type": "pagerduty",
"name": "security-oncall",
"config": {
"routing_key": "xxx",
"severity_mapping": {
"critical": "critical",
"high": "error"
}
}
}
],
"throttle": {
"enabled": true,
"max_per_hour": 50,
"dedupe_window_seconds": 300
},
"enabled": true,
"priority": 100,
"created_at": "2025-12-01T00:00:00Z"
}
]
}
]
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,564 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/reachability-input.schema.json",
"title": "StellaOps Reachability Input Schema",
"description": "Schema for reachability/exploitability signals input to Policy Engine. Unblocks POLICY-ENGINE-80-001, POLICY-RISK-66-003.",
"type": "object",
"definitions": {
"ReachabilityInput": {
"type": "object",
"description": "Input payload for policy engine reachability evaluation",
"required": ["subject", "reachability_facts", "timestamp"],
"properties": {
"subject": {
"$ref": "#/definitions/Subject"
},
"reachability_facts": {
"type": "array",
"items": {
"$ref": "#/definitions/ReachabilityFact"
}
},
"exploitability_facts": {
"type": "array",
"items": {
"$ref": "#/definitions/ExploitabilityFact"
}
},
"callgraph_refs": {
"type": "array",
"items": {
"$ref": "#/definitions/CallgraphRef"
}
},
"runtime_facts": {
"type": "array",
"items": {
"$ref": "#/definitions/RuntimeFact"
}
},
"entropy_score": {
"$ref": "#/definitions/EntropyScore"
},
"timestamp": {
"type": "string",
"format": "date-time"
},
"metadata": {
"type": "object",
"additionalProperties": true
}
}
},
"Subject": {
"type": "object",
"description": "Subject being evaluated (component + vulnerability)",
"required": ["purl"],
"properties": {
"purl": {
"type": "string",
"description": "Package URL of the component"
},
"cve_id": {
"type": "string",
"pattern": "^CVE-[0-9]{4}-[0-9]+$"
},
"ghsa_id": {
"type": "string",
"pattern": "^GHSA-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}$"
},
"vulnerability_id": {
"type": "string",
"description": "Internal vulnerability identifier"
},
"affected_symbols": {
"type": "array",
"items": {
"type": "string"
},
"description": "Vulnerable symbols/functions in the component"
},
"version_range": {
"type": "string",
"description": "Affected version range (e.g., '<1.2.3')"
}
}
},
"ReachabilityFact": {
"type": "object",
"description": "Static reachability analysis result",
"required": ["state", "confidence"],
"properties": {
"state": {
"type": "string",
"enum": ["reachable", "unreachable", "potentially_reachable", "unknown"],
"description": "Reachability state"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Confidence score (0-1)"
},
"source": {
"type": "string",
"enum": ["static_analysis", "dynamic_analysis", "sbom_inference", "manual", "external"],
"description": "Source of the reachability determination"
},
"analyzer": {
"type": "string",
"description": "Analyzer tool that produced this fact"
},
"analyzer_version": {
"type": "string"
},
"call_path": {
"$ref": "#/definitions/CallPath"
},
"entry_points": {
"type": "array",
"items": {
"$ref": "#/definitions/EntryPoint"
}
},
"evidence": {
"$ref": "#/definitions/ReachabilityEvidence"
},
"evaluated_at": {
"type": "string",
"format": "date-time"
}
}
},
"CallPath": {
"type": "object",
"description": "Call path from entry point to vulnerable symbol",
"properties": {
"depth": {
"type": "integer",
"minimum": 0,
"description": "Call depth from entry point"
},
"nodes": {
"type": "array",
"items": {
"$ref": "#/definitions/CallNode"
}
},
"edges": {
"type": "array",
"items": {
"$ref": "#/definitions/CallEdge"
}
}
}
},
"CallNode": {
"type": "object",
"required": ["id", "symbol"],
"properties": {
"id": {
"type": "string"
},
"symbol": {
"type": "string",
"description": "Fully qualified symbol name"
},
"file": {
"type": "string"
},
"line": {
"type": "integer"
},
"package": {
"type": "string"
},
"is_vulnerable": {
"type": "boolean"
},
"is_entry_point": {
"type": "boolean"
}
}
},
"CallEdge": {
"type": "object",
"required": ["source", "target"],
"properties": {
"source": {
"type": "string"
},
"target": {
"type": "string"
},
"call_type": {
"type": "string",
"enum": ["direct", "indirect", "virtual", "reflection", "dynamic"]
}
}
},
"EntryPoint": {
"type": "object",
"description": "Application entry point that can reach vulnerable code",
"required": ["type", "identifier"],
"properties": {
"type": {
"type": "string",
"enum": ["http_endpoint", "grpc_method", "cli_command", "event_handler", "scheduled_job", "main", "test"]
},
"identifier": {
"type": "string",
"description": "Entry point identifier (e.g., 'POST /api/users')"
},
"file": {
"type": "string"
},
"line": {
"type": "integer"
},
"exposed": {
"type": "boolean",
"default": true,
"description": "Whether this entry point is externally exposed"
},
"authentication_required": {
"type": "boolean"
}
}
},
"ReachabilityEvidence": {
"type": "object",
"description": "Supporting evidence for reachability determination",
"properties": {
"digest": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"evidence_uri": {
"type": "string",
"format": "uri"
},
"callgraph_digest": {
"type": "string"
},
"sbom_digest": {
"type": "string"
},
"analysis_log_uri": {
"type": "string",
"format": "uri"
}
}
},
"ExploitabilityFact": {
"type": "object",
"description": "Exploitability assessment",
"required": ["state", "confidence"],
"properties": {
"state": {
"type": "string",
"enum": ["exploitable", "not_exploitable", "conditionally_exploitable", "unknown"]
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"source": {
"type": "string",
"enum": ["kev", "epss", "vendor_advisory", "internal_analysis", "exploit_db"]
},
"epss_score": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "EPSS probability score"
},
"epss_percentile": {
"type": "number",
"minimum": 0,
"maximum": 100
},
"kev_listed": {
"type": "boolean",
"description": "Listed in CISA Known Exploited Vulnerabilities"
},
"kev_due_date": {
"type": "string",
"format": "date"
},
"exploit_maturity": {
"type": "string",
"enum": ["not_defined", "unproven", "poc", "functional", "high"],
"description": "Exploit maturity level (per CVSS)"
},
"exploit_refs": {
"type": "array",
"items": {
"type": "string",
"format": "uri"
}
},
"conditions": {
"type": "array",
"items": {
"$ref": "#/definitions/ExploitCondition"
},
"description": "Conditions required for exploitation"
},
"evaluated_at": {
"type": "string",
"format": "date-time"
}
}
},
"ExploitCondition": {
"type": "object",
"description": "Condition required for exploitation",
"required": ["condition", "met"],
"properties": {
"condition": {
"type": "string",
"description": "Description of the condition"
},
"met": {
"type": "boolean"
},
"confidence": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"evidence": {
"type": "string"
}
}
},
"CallgraphRef": {
"type": "object",
"description": "Reference to a stored callgraph",
"required": ["digest"],
"properties": {
"digest": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"format": {
"type": "string",
"enum": ["richgraph-v1", "dot", "json-graph", "sarif"],
"default": "richgraph-v1"
},
"uri": {
"type": "string",
"format": "uri"
},
"generated_at": {
"type": "string",
"format": "date-time"
},
"generator": {
"type": "string"
},
"generator_version": {
"type": "string"
}
}
},
"RuntimeFact": {
"type": "object",
"description": "Runtime observation fact",
"required": ["type", "observed_at"],
"properties": {
"type": {
"type": "string",
"enum": ["function_called", "function_not_called", "path_executed", "path_not_executed", "module_loaded", "module_not_loaded"]
},
"symbol": {
"type": "string"
},
"module": {
"type": "string"
},
"call_count": {
"type": "integer",
"minimum": 0
},
"last_called": {
"type": "string",
"format": "date-time"
},
"observed_at": {
"type": "string",
"format": "date-time"
},
"observation_window": {
"type": "string",
"description": "Duration of observation (e.g., '7d', '30d')"
},
"environment": {
"type": "string",
"enum": ["production", "staging", "development", "test"]
}
}
},
"EntropyScore": {
"type": "object",
"description": "Scanner entropy/trust score for confidence weighting",
"properties": {
"overall": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Overall trust score"
},
"sbom_completeness": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"callgraph_coverage": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"runtime_coverage": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"analyzer_confidence": {
"type": "number",
"minimum": 0,
"maximum": 1
},
"data_freshness": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "How recent the underlying data is"
}
}
},
"ReachabilityOutput": {
"type": "object",
"description": "Policy engine output after reachability evaluation",
"required": ["subject", "effective_state", "risk_adjustment"],
"properties": {
"subject": {
"$ref": "#/definitions/Subject"
},
"effective_state": {
"type": "string",
"enum": ["reachable", "unreachable", "potentially_reachable", "unknown"]
},
"effective_exploitability": {
"type": "string",
"enum": ["exploitable", "not_exploitable", "conditionally_exploitable", "unknown"]
},
"risk_adjustment": {
"type": "object",
"properties": {
"factor": {
"type": "number",
"minimum": 0,
"maximum": 2,
"description": "Risk multiplier (0 = suppress, 1 = neutral, >1 = amplify)"
},
"severity_override": {
"type": "string",
"enum": ["critical", "high", "medium", "low", "info"]
},
"justification": {
"type": "string"
}
}
},
"policy_trace": {
"type": "array",
"items": {
"type": "object",
"properties": {
"rule_id": { "type": "string" },
"result": { "type": "string" },
"reason": { "type": "string" }
}
}
},
"evaluated_at": {
"type": "string",
"format": "date-time"
}
}
}
},
"properties": {
"inputs": {
"type": "array",
"items": {
"$ref": "#/definitions/ReachabilityInput"
}
}
},
"examples": [
{
"inputs": [
{
"subject": {
"purl": "pkg:npm/lodash@4.17.20",
"cve_id": "CVE-2021-23337",
"affected_symbols": ["lodash.template"]
},
"reachability_facts": [
{
"state": "reachable",
"confidence": 0.95,
"source": "static_analysis",
"analyzer": "stellaops-scanner",
"analyzer_version": "2025.10.0",
"call_path": {
"depth": 3,
"nodes": [
{ "id": "n1", "symbol": "app.renderTemplate", "is_entry_point": true },
{ "id": "n2", "symbol": "templateEngine.compile" },
{ "id": "n3", "symbol": "lodash.template", "is_vulnerable": true }
],
"edges": [
{ "source": "n1", "target": "n2", "call_type": "direct" },
{ "source": "n2", "target": "n3", "call_type": "direct" }
]
},
"entry_points": [
{
"type": "http_endpoint",
"identifier": "POST /api/render",
"exposed": true,
"authentication_required": true
}
],
"evaluated_at": "2025-12-06T10:00:00Z"
}
],
"exploitability_facts": [
{
"state": "exploitable",
"confidence": 0.8,
"source": "epss",
"epss_score": 0.42,
"epss_percentile": 87,
"kev_listed": false,
"exploit_maturity": "functional",
"evaluated_at": "2025-12-06T10:00:00Z"
}
],
"entropy_score": {
"overall": 0.85,
"sbom_completeness": 0.95,
"callgraph_coverage": 0.78,
"analyzer_confidence": 0.9
},
"timestamp": "2025-12-06T10:00:00Z"
}
]
}
]
}

View File

@@ -0,0 +1,6 @@
global using System;
global using System.Collections.Generic;
global using System.IO;
global using System.Linq;
global using System.Threading;
global using System.Threading.Tasks;

View File

@@ -1,171 +0,0 @@
using StellaOps.Scanner.Surface.Discovery;
namespace StellaOps.Scanner.Surface.Models;
/// <summary>
/// Complete result of surface analysis for a scan.
/// </summary>
public sealed record SurfaceAnalysisResult
{
/// <summary>
/// Scan identifier.
/// </summary>
public required string ScanId { get; init; }
/// <summary>
/// When analysis was performed.
/// </summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Analysis summary statistics.
/// </summary>
public required SurfaceAnalysisSummary Summary { get; init; }
/// <summary>
/// Discovered surface entries.
/// </summary>
public required IReadOnlyList<SurfaceEntry> Entries { get; init; }
/// <summary>
/// Discovered entry points.
/// </summary>
public IReadOnlyList<EntryPoint>? EntryPoints { get; init; }
/// <summary>
/// Analysis metadata.
/// </summary>
public SurfaceAnalysisMetadata? Metadata { get; init; }
}
/// <summary>
/// Summary statistics for surface analysis.
/// </summary>
public sealed record SurfaceAnalysisSummary
{
/// <summary>
/// Total number of surface entries.
/// </summary>
public required int TotalEntries { get; init; }
/// <summary>
/// Entry counts by type.
/// </summary>
public required IReadOnlyDictionary<SurfaceType, int> ByType { get; init; }
/// <summary>
/// Entry counts by confidence level.
/// </summary>
public required IReadOnlyDictionary<ConfidenceLevel, int> ByConfidence { get; init; }
/// <summary>
/// Calculated risk score (0.0 - 1.0).
/// </summary>
public required double RiskScore { get; init; }
/// <summary>
/// High-risk entry count.
/// </summary>
public int HighRiskCount { get; init; }
/// <summary>
/// Total entry points discovered.
/// </summary>
public int? EntryPointCount { get; init; }
/// <summary>
/// Creates summary from entries.
/// </summary>
public static SurfaceAnalysisSummary FromEntries(IReadOnlyList<SurfaceEntry> entries)
{
var byType = entries
.GroupBy(e => e.Type)
.ToDictionary(g => g.Key, g => g.Count());
var byConfidence = entries
.GroupBy(e => e.Confidence)
.ToDictionary(g => g.Key, g => g.Count());
// Calculate risk score based on entry types and confidence
var riskScore = CalculateRiskScore(entries);
var highRiskCount = entries.Count(e =>
e.Type is SurfaceType.ProcessExecution or SurfaceType.CryptoOperation or SurfaceType.SecretAccess ||
e.Confidence == ConfidenceLevel.Verified);
return new SurfaceAnalysisSummary
{
TotalEntries = entries.Count,
ByType = byType,
ByConfidence = byConfidence,
RiskScore = riskScore,
HighRiskCount = highRiskCount
};
}
private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries)
{
if (entries.Count == 0) return 0.0;
var typeWeights = new Dictionary<SurfaceType, double>
{
[SurfaceType.ProcessExecution] = 1.0,
[SurfaceType.SecretAccess] = 0.9,
[SurfaceType.CryptoOperation] = 0.8,
[SurfaceType.DatabaseOperation] = 0.7,
[SurfaceType.Deserialization] = 0.85,
[SurfaceType.DynamicCode] = 0.9,
[SurfaceType.AuthenticationPoint] = 0.6,
[SurfaceType.NetworkEndpoint] = 0.5,
[SurfaceType.InputHandling] = 0.5,
[SurfaceType.ExternalCall] = 0.4,
[SurfaceType.FileOperation] = 0.3
};
var confidenceMultipliers = new Dictionary<ConfidenceLevel, double>
{
[ConfidenceLevel.Low] = 0.5,
[ConfidenceLevel.Medium] = 0.75,
[ConfidenceLevel.High] = 1.0,
[ConfidenceLevel.Verified] = 1.0
};
var totalWeight = entries.Sum(e =>
typeWeights.GetValueOrDefault(e.Type, 0.3) *
confidenceMultipliers.GetValueOrDefault(e.Confidence, 0.5));
// Normalize to 0-1 range (cap at 100 weighted entries)
return Math.Min(1.0, totalWeight / 100.0);
}
}
/// <summary>
/// Metadata about the surface analysis execution.
/// </summary>
public sealed record SurfaceAnalysisMetadata
{
/// <summary>
/// Analysis duration in milliseconds.
/// </summary>
public double DurationMs { get; init; }
/// <summary>
/// Files analyzed count.
/// </summary>
public int FilesAnalyzed { get; init; }
/// <summary>
/// Languages detected.
/// </summary>
public IReadOnlyList<string>? Languages { get; init; }
/// <summary>
/// Frameworks detected.
/// </summary>
public IReadOnlyList<string>? Frameworks { get; init; }
/// <summary>
/// Analysis configuration used.
/// </summary>
public SurfaceAnalysisOptions? Options { get; init; }
}

View File

@@ -1,121 +0,0 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Output;
/// <summary>
/// Interface for writing surface analysis results.
/// </summary>
public interface ISurfaceAnalysisWriter
{
/// <summary>
/// Writes analysis result to the specified stream.
/// </summary>
Task WriteAsync(
SurfaceAnalysisResult result,
Stream outputStream,
CancellationToken cancellationToken = default);
/// <summary>
/// Serializes analysis result to JSON string.
/// </summary>
string Serialize(SurfaceAnalysisResult result);
}
/// <summary>
/// Store key for surface analysis results.
/// </summary>
public static class SurfaceAnalysisStoreKeys
{
/// <summary>
/// Key for storing surface analysis in scan artifacts.
/// </summary>
public const string SurfaceAnalysis = "scanner.surface.analysis";
/// <summary>
/// Key for storing surface entries.
/// </summary>
public const string SurfaceEntries = "scanner.surface.entries";
/// <summary>
/// Key for storing entry points.
/// </summary>
public const string EntryPoints = "scanner.surface.entrypoints";
}
/// <summary>
/// Default implementation of surface analysis writer.
/// Uses deterministic JSON serialization.
/// </summary>
public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
{
private readonly ILogger<SurfaceAnalysisWriter> _logger;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
};
private static readonly JsonSerializerOptions PrettyJsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
};
public SurfaceAnalysisWriter(ILogger<SurfaceAnalysisWriter> logger)
{
_logger = logger;
}
public async Task WriteAsync(
SurfaceAnalysisResult result,
Stream outputStream,
CancellationToken cancellationToken = default)
{
// Sort entries by ID for determinism
var sortedResult = SortResult(result);
await JsonSerializer.SerializeAsync(
outputStream,
sortedResult,
JsonOptions,
cancellationToken);
_logger.LogDebug(
"Wrote surface analysis for scan {ScanId} with {EntryCount} entries",
result.ScanId,
result.Entries.Count);
}
public string Serialize(SurfaceAnalysisResult result)
{
var sortedResult = SortResult(result);
return JsonSerializer.Serialize(sortedResult, PrettyJsonOptions);
}
private static SurfaceAnalysisResult SortResult(SurfaceAnalysisResult result)
{
// Sort entries by ID for deterministic output
var sortedEntries = result.Entries
.OrderBy(e => e.Id)
.ToList();
// Sort entry points by ID if present
var sortedEntryPoints = result.EntryPoints?
.OrderBy(ep => ep.Id)
.ToList();
return result with
{
Entries = sortedEntries,
EntryPoints = sortedEntryPoints
};
}
}

View File

@@ -80,7 +80,7 @@ public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
var jsonOptions = options.PrettyPrint ? s_prettyJsonOptions : s_jsonOptions;
if (options.WriteToFile && \!string.IsNullOrEmpty(options.OutputPath))
if (options.WriteToFile && !string.IsNullOrEmpty(options.OutputPath))
{
var filePath = Path.Combine(options.OutputPath, $"surface-{result.ScanId}.json");
await using var stream = File.Create(filePath);

View File

@@ -1,153 +0,0 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Surface.Discovery;
using StellaOps.Scanner.Surface.Output;
using StellaOps.Scanner.Surface.Signals;
namespace StellaOps.Scanner.Surface;
/// <summary>
/// Extension methods for registering surface analysis services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds surface analysis services to the service collection.
/// </summary>
public static IServiceCollection AddSurfaceAnalysis(
this IServiceCollection services,
IConfiguration? configuration = null)
{
// Core services
services.TryAddSingleton<ISurfaceEntryRegistry, SurfaceEntryRegistry>();
services.TryAddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
services.TryAddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
services.TryAddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();
// Configure options if configuration provided
if (configuration != null)
{
services.Configure<SurfaceAnalysisOptions>(
configuration.GetSection("Scanner:Surface"));
}
return services;
}
/// <summary>
/// Adds surface analysis services with a signal sink.
/// </summary>
public static IServiceCollection AddSurfaceAnalysis<TSignalSink>(
this IServiceCollection services,
IConfiguration? configuration = null)
where TSignalSink : class, ISurfaceSignalSink
{
services.AddSurfaceAnalysis(configuration);
services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
return services;
}
/// <summary>
/// Adds surface analysis services with in-memory signal sink for testing.
/// </summary>
public static IServiceCollection AddSurfaceAnalysisForTesting(this IServiceCollection services)
{
services.AddSurfaceAnalysis();
services.TryAddSingleton<ISurfaceSignalSink, InMemorySurfaceSignalSink>();
return services;
}
/// <summary>
/// Registers a surface entry collector.
/// </summary>
public static IServiceCollection AddSurfaceCollector<TCollector>(this IServiceCollection services)
where TCollector : class, ISurfaceEntryCollector
{
services.AddSingleton<ISurfaceEntryCollector, TCollector>();
return services;
}
/// <summary>
/// Registers multiple surface entry collectors.
/// </summary>
public static IServiceCollection AddSurfaceCollectors(
this IServiceCollection services,
params Type[] collectorTypes)
{
foreach (var type in collectorTypes)
{
if (!typeof(ISurfaceEntryCollector).IsAssignableFrom(type))
{
throw new ArgumentException(
$"Type {type.Name} does not implement ISurfaceEntryCollector",
nameof(collectorTypes));
}
services.AddSingleton(typeof(ISurfaceEntryCollector), type);
}
return services;
}
}
/// <summary>
/// Builder for configuring surface analysis.
/// </summary>
public sealed class SurfaceAnalysisBuilder
{
private readonly IServiceCollection _services;
internal SurfaceAnalysisBuilder(IServiceCollection services)
{
_services = services;
}
/// <summary>
/// Registers a collector.
/// </summary>
public SurfaceAnalysisBuilder AddCollector<TCollector>()
where TCollector : class, ISurfaceEntryCollector
{
_services.AddSurfaceCollector<TCollector>();
return this;
}
/// <summary>
/// Configures a custom signal sink.
/// </summary>
public SurfaceAnalysisBuilder UseSignalSink<TSignalSink>()
where TSignalSink : class, ISurfaceSignalSink
{
_services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
return this;
}
/// <summary>
/// Configures options.
/// </summary>
public SurfaceAnalysisBuilder Configure(Action<SurfaceAnalysisOptions> configure)
{
_services.Configure(configure);
return this;
}
}
/// <summary>
/// Extension for fluent builder pattern.
/// </summary>
public static class SurfaceAnalysisBuilderExtensions
{
/// <summary>
/// Adds surface analysis with fluent configuration.
/// </summary>
public static IServiceCollection AddSurfaceAnalysis(
this IServiceCollection services,
Action<SurfaceAnalysisBuilder> configure)
{
services.AddSurfaceAnalysis();
var builder = new SurfaceAnalysisBuilder(services);
configure(builder);
return services;
}
}

View File

@@ -1,177 +0,0 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;
namespace StellaOps.Scanner.Surface.Signals;
/// <summary>
/// Interface for emitting surface analysis signals for policy evaluation.
/// </summary>
public interface ISurfaceSignalEmitter
{
/// <summary>
/// Emits signals for the given analysis result.
/// </summary>
Task EmitAsync(
string scanId,
SurfaceAnalysisResult result,
CancellationToken cancellationToken = default);
/// <summary>
/// Emits custom signals.
/// </summary>
Task EmitAsync(
string scanId,
IDictionary<string, object> signals,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of surface signal emitter.
/// Converts analysis results to policy signals.
/// </summary>
public sealed class SurfaceSignalEmitter : ISurfaceSignalEmitter
{
private readonly ILogger<SurfaceSignalEmitter> _logger;
private readonly ISurfaceSignalSink? _sink;
public SurfaceSignalEmitter(
ILogger<SurfaceSignalEmitter> logger,
ISurfaceSignalSink? sink = null)
{
_logger = logger;
_sink = sink;
}
public async Task EmitAsync(
string scanId,
SurfaceAnalysisResult result,
CancellationToken cancellationToken = default)
{
var signals = BuildSignals(result);
await EmitAsync(scanId, signals, cancellationToken);
}
public async Task EmitAsync(
string scanId,
IDictionary<string, object> signals,
CancellationToken cancellationToken = default)
{
_logger.LogDebug(
"Emitting {SignalCount} surface signals for scan {ScanId}",
signals.Count,
scanId);
if (_sink != null)
{
await _sink.WriteAsync(scanId, signals, cancellationToken);
}
else
{
_logger.LogDebug(
"No signal sink configured, signals for scan {ScanId}: {Signals}",
scanId,
string.Join(", ", signals.Select(kv => $"{kv.Key}={kv.Value}")));
}
}
private static Dictionary<string, object> BuildSignals(SurfaceAnalysisResult result)
{
var signals = new Dictionary<string, object>
{
[SurfaceSignalKeys.TotalSurfaceArea] = result.Summary.TotalEntries,
[SurfaceSignalKeys.RiskScore] = result.Summary.RiskScore,
[SurfaceSignalKeys.HighConfidenceCount] = result.Entries
.Count(e => e.Confidence >= ConfidenceLevel.High)
};
// Add counts by type
foreach (var (type, count) in result.Summary.ByType)
{
var key = type switch
{
SurfaceType.NetworkEndpoint => SurfaceSignalKeys.NetworkEndpoints,
SurfaceType.FileOperation => SurfaceSignalKeys.FileOperations,
SurfaceType.ProcessExecution => SurfaceSignalKeys.ProcessSpawns,
SurfaceType.CryptoOperation => SurfaceSignalKeys.CryptoUsage,
SurfaceType.AuthenticationPoint => SurfaceSignalKeys.AuthPoints,
SurfaceType.InputHandling => SurfaceSignalKeys.InputHandlers,
SurfaceType.SecretAccess => SurfaceSignalKeys.SecretAccess,
SurfaceType.ExternalCall => SurfaceSignalKeys.ExternalCalls,
SurfaceType.DatabaseOperation => SurfaceSignalKeys.DatabaseOperations,
SurfaceType.Deserialization => SurfaceSignalKeys.DeserializationPoints,
SurfaceType.DynamicCode => SurfaceSignalKeys.DynamicCodePoints,
_ => $"{SurfaceSignalKeys.Prefix}{type.ToString().ToLowerInvariant()}"
};
signals[key] = count;
}
// Add entry point count if available
if (result.EntryPoints is { Count: > 0 })
{
signals[SurfaceSignalKeys.EntryPointCount] = result.EntryPoints.Count;
}
// Add framework signals if metadata available
if (result.Metadata?.Frameworks is { Count: > 0 } frameworks)
{
foreach (var framework in frameworks)
{
var normalizedName = framework.ToLowerInvariant().Replace(" ", "_").Replace(".", "_");
signals[$"{SurfaceSignalKeys.FrameworkPrefix}{normalizedName}"] = true;
}
}
// Add language signals if metadata available
if (result.Metadata?.Languages is { Count: > 0 } languages)
{
foreach (var language in languages)
{
var normalizedName = language.ToLowerInvariant();
signals[$"{SurfaceSignalKeys.LanguagePrefix}{normalizedName}"] = true;
}
}
return signals;
}
}
/// <summary>
/// Sink for writing surface signals to storage.
/// </summary>
public interface ISurfaceSignalSink
{
/// <summary>
/// Writes signals to storage.
/// </summary>
Task WriteAsync(
string scanId,
IDictionary<string, object> signals,
CancellationToken cancellationToken = default);
}
/// <summary>
/// In-memory signal sink for testing.
/// </summary>
public sealed class InMemorySurfaceSignalSink : ISurfaceSignalSink
{
private readonly Dictionary<string, IDictionary<string, object>> _signals = new();
public IReadOnlyDictionary<string, IDictionary<string, object>> Signals => _signals;
public Task WriteAsync(
string scanId,
IDictionary<string, object> signals,
CancellationToken cancellationToken = default)
{
_signals[scanId] = new Dictionary<string, object>(signals);
return Task.CompletedTask;
}
public IDictionary<string, object>? GetSignals(string scanId)
{
return _signals.TryGetValue(scanId, out var signals) ? signals : null;
}
public void Clear() => _signals.Clear();
}
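
A sketch of exercising the emitter against the in-memory sink, e.g. in a unit test; the `result` instance is assumed to come from an analyzer run, and the xunit assertions are illustrative:

```csharp
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Surface.Signals;
using Xunit;

var sink = new InMemorySurfaceSignalSink();
var emitter = new SurfaceSignalEmitter(NullLogger<SurfaceSignalEmitter>.Instance, sink);

// `result` is assumed to be a SurfaceAnalysisResult produced elsewhere.
await emitter.EmitAsync("scan-123", result);

var signals = sink.GetSignals("scan-123");
Assert.NotNull(signals);
Assert.True(signals!.ContainsKey(SurfaceSignalKeys.RiskScore));
```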

View File

@@ -1,64 +0,0 @@
namespace StellaOps.Scanner.Surface.Signals;
/// <summary>
/// Standard signal keys for surface analysis policy integration.
/// </summary>
public static class SurfaceSignalKeys
{
/// <summary>Prefix for all surface signals.</summary>
public const string Prefix = "surface.";
/// <summary>Network endpoint count.</summary>
public const string NetworkEndpoints = "surface.network.endpoints";
/// <summary>Exposed port count.</summary>
public const string ExposedPorts = "surface.network.ports";
/// <summary>File operation count.</summary>
public const string FileOperations = "surface.file.operations";
/// <summary>Process spawn count.</summary>
public const string ProcessSpawns = "surface.process.spawns";
/// <summary>Crypto operation count.</summary>
public const string CryptoUsage = "surface.crypto.usage";
/// <summary>Authentication point count.</summary>
public const string AuthPoints = "surface.auth.points";
/// <summary>Input handler count.</summary>
public const string InputHandlers = "surface.input.handlers";
/// <summary>Secret access point count.</summary>
public const string SecretAccess = "surface.secrets.access";
/// <summary>External call count.</summary>
public const string ExternalCalls = "surface.external.calls";
/// <summary>Database operation count.</summary>
public const string DatabaseOperations = "surface.database.operations";
/// <summary>Deserialization point count.</summary>
public const string DeserializationPoints = "surface.deserialization.points";
/// <summary>Dynamic code execution count.</summary>
public const string DynamicCodePoints = "surface.dynamic.code";
/// <summary>Total surface area score.</summary>
public const string TotalSurfaceArea = "surface.total.area";
/// <summary>Overall risk score (0.0-1.0).</summary>
public const string RiskScore = "surface.risk.score";
/// <summary>High-confidence entry count.</summary>
public const string HighConfidenceCount = "surface.high_confidence.count";
/// <summary>Entry point count.</summary>
public const string EntryPointCount = "surface.entry_points.count";
/// <summary>Framework-specific prefix.</summary>
public const string FrameworkPrefix = "surface.framework.";
/// <summary>Language-specific prefix.</summary>
public const string LanguagePrefix = "surface.language.";
}
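
As a sketch of the consumer side, a policy check can read these keys straight off the emitted dictionary; per the emitter above, count-style keys carry `int` values:

```csharp
// Returns true when the scan touched more secret-access points than the budget allows.
static bool ExceedsSecretBudget(IDictionary<string, object> signals, int budget)
    => signals.TryGetValue(SurfaceSignalKeys.SecretAccess, out var value)
        && value is int count
        && count > budget;
```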

View File

@@ -16,7 +16,6 @@
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="System.Text.Json" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Surface.FS\StellaOps.Scanner.Surface.FS.csproj" />

View File

@@ -7,21 +7,41 @@ using StellaOps.Scanner.Surface.Signals;
namespace StellaOps.Scanner.Surface;
/// <summary>
/// Main interface for surface analysis operations.
/// Options for surface analysis execution.
/// </summary>
public sealed record SurfaceAnalysisOptions
{
/// <summary>Collector options.</summary>
public SurfaceCollectorOptions CollectorOptions { get; init; } = new();
/// <summary>Output options.</summary>
public SurfaceOutputOptions OutputOptions { get; init; } = new();
/// <summary>Whether to emit policy signals.</summary>
public bool EmitSignals { get; init; } = true;
/// <summary>Whether to discover entry points.</summary>
public bool DiscoverEntryPoints { get; init; } = true;
/// <summary>Languages to analyze for entry points.</summary>
public IReadOnlySet<string> Languages { get; init; } = new HashSet<string>();
}
/// <summary>
/// Interface for orchestrating surface analysis.
/// </summary>
public interface ISurfaceAnalyzer
{
/// <summary>
/// Performs surface analysis on the given context.
/// </summary>
/// <summary>Runs surface analysis on the specified path.</summary>
Task<SurfaceAnalysisResult> AnalyzeAsync(
SurfaceCollectionContext context,
string scanId,
string rootPath,
SurfaceAnalysisOptions? options = null,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of surface analyzer.
/// Coordinates collectors, signal emission, and output writing.
/// Default surface analyzer implementation.
/// </summary>
public sealed class SurfaceAnalyzer : ISurfaceAnalyzer
{
@@ -43,59 +63,152 @@ public sealed class SurfaceAnalyzer : ISurfaceAnalyzer
}
public async Task<SurfaceAnalysisResult> AnalyzeAsync(
SurfaceCollectionContext context,
string scanId,
string rootPath,
SurfaceAnalysisOptions? options = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(context);
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
options ??= new SurfaceAnalysisOptions();
var startTime = DateTimeOffset.UtcNow;
_logger.LogInformation("Starting surface analysis for scan {ScanId} at {RootPath}", scanId, rootPath);
_logger.LogInformation(
"Starting surface analysis for scan {ScanId} with {FileCount} files",
context.ScanId,
context.Files.Count);
// Collect entries from all applicable collectors
var entries = new List<SurfaceEntry>();
await foreach (var entry in _registry.CollectAllAsync(context, cancellationToken))
var context = new SurfaceCollectorContext
{
entries.Add(entry);
ScanId = scanId,
RootPath = rootPath,
Options = options.CollectorOptions
};
// Collect surface entries
var entries = new List<SurfaceEntry>();
var collectors = _registry.GetCollectors();
_logger.LogDebug("Running {CollectorCount} surface collectors", collectors.Count);
foreach (var collector in collectors)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var countBefore = entries.Count;
await foreach (var entry in collector.CollectAsync(context, cancellationToken))
{
entries.Add(entry);
}
_logger.LogDebug("Collector {CollectorId} found {Count} entries", collector.CollectorId, entries.Count - countBefore);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Collector {CollectorId} failed", collector.CollectorId);
}
}
_logger.LogDebug(
"Collected {EntryCount} surface entries for scan {ScanId}",
entries.Count,
context.ScanId);
// Collect entry points
var entryPoints = new List<EntryPoint>();
if (options.DiscoverEntryPoints)
{
var epCollectors = _registry.GetEntryPointCollectors();
_logger.LogDebug("Running {Count} entry point collectors", epCollectors.Count);
foreach (var collector in epCollectors)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await foreach (var ep in collector.CollectAsync(context, cancellationToken))
{
entryPoints.Add(ep);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Entry point collector {CollectorId} failed", collector.CollectorId);
}
}
}
// Sort entries by ID for determinism
entries.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));
entryPoints.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));
// Build summary
var summary = SurfaceAnalysisSummary.FromEntries(entries);
var byType = entries.GroupBy(e => e.Type).ToDictionary(g => g.Key, g => g.Count());
var summary = new SurfaceAnalysisSummary
{
TotalEntries = entries.Count,
ByType = byType,
RiskScore = CalculateRiskScore(entries, entryPoints)
};
// Create result
var result = new SurfaceAnalysisResult
{
ScanId = context.ScanId,
ScanId = scanId,
Timestamp = DateTimeOffset.UtcNow,
Summary = summary,
Entries = entries,
Metadata = new SurfaceAnalysisMetadata
{
DurationMs = (DateTimeOffset.UtcNow - startTime).TotalMilliseconds,
FilesAnalyzed = context.Files.Count,
Languages = context.DetectedLanguages,
Frameworks = context.DetectedFrameworks,
Options = context.Options
}
EntryPoints = entryPoints
};
// Emit signals for policy evaluation
await _signalEmitter.EmitAsync(context.ScanId, result, cancellationToken);
// Emit signals
if (options.EmitSignals)
{
var signals = SurfaceSignalEmitter.BuildSignals(result);
await _signalEmitter.EmitAsync(scanId, signals, cancellationToken);
}
// Write output
await _writer.WriteAsync(result, options.OutputOptions, cancellationToken);
_logger.LogInformation(
"Completed surface analysis for scan {ScanId}: {TotalEntries} entries, risk score {RiskScore:F2}",
context.ScanId,
result.Summary.TotalEntries,
result.Summary.RiskScore);
"Surface analysis complete: {EntryCount} entries, {EntryPointCount} entry points, risk score {RiskScore:F2}",
entries.Count, entryPoints.Count, summary.RiskScore);
return result;
}
private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries, IReadOnlyList<EntryPoint> entryPoints)
{
if (entries.Count == 0 && entryPoints.Count == 0)
return 0.0;
// Weight high-risk types more heavily
var riskWeights = new Dictionary<SurfaceType, double>
{
[SurfaceType.SecretAccess] = 1.0,
[SurfaceType.AuthenticationPoint] = 0.9,
[SurfaceType.ProcessExecution] = 0.8,
[SurfaceType.CryptoOperation] = 0.7,
[SurfaceType.ExternalCall] = 0.6,
[SurfaceType.NetworkEndpoint] = 0.5,
[SurfaceType.InputHandling] = 0.5,
[SurfaceType.FileOperation] = 0.3
};
double totalWeight = 0;
double weightedSum = 0;
foreach (var entry in entries)
{
var weight = riskWeights.GetValueOrDefault(entry.Type, 0.5);
var confidence = entry.Confidence switch
{
ConfidenceLevel.VeryHigh => 1.0,
ConfidenceLevel.High => 0.8,
ConfidenceLevel.Medium => 0.5,
ConfidenceLevel.Low => 0.2,
_ => 0.5
};
weightedSum += weight * confidence;
totalWeight += 1.0;
}
// Entry points add to risk
weightedSum += entryPoints.Count * 0.3;
totalWeight += entryPoints.Count * 0.5;
return totalWeight > 0 ? Math.Min(1.0, weightedSum / totalWeight) : 0.0;
}
}
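
End to end, the reworked entry point is invoked per scan roughly as follows; the path, scan id, and language names are illustrative, and `provider` is a built service provider:

```csharp
var analyzer = provider.GetRequiredService<ISurfaceAnalyzer>();

var result = await analyzer.AnalyzeAsync(
    scanId: "scan-123",
    rootPath: "/work/unpacked-image",
    options: new SurfaceAnalysisOptions
    {
        EmitSignals = true,
        DiscoverEntryPoints = true,
        Languages = new HashSet<string> { "csharp", "python" }
    });

Console.WriteLine($"{result.Summary.TotalEntries} entries, risk {result.Summary.RiskScore:F2}");
```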

View File

@@ -0,0 +1,41 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.Surface.Discovery;
using StellaOps.Scanner.Surface.Output;
using StellaOps.Scanner.Surface.Signals;
namespace StellaOps.Scanner.Surface;
/// <summary>
/// DI registration extensions for Scanner Surface analysis.
/// </summary>
public static class SurfaceServiceCollectionExtensions
{
/// <summary>Adds surface analysis services to the service collection.</summary>
public static IServiceCollection AddSurfaceAnalysis(this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.AddSingleton<ISurfaceEntryRegistry, SurfaceEntryRegistry>();
services.AddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
services.AddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
services.AddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();
return services;
}
/// <summary>Adds a surface entry collector.</summary>
public static IServiceCollection AddSurfaceCollector<T>(this IServiceCollection services)
where T : class, ISurfaceEntryCollector
{
services.AddSingleton<ISurfaceEntryCollector, T>();
return services;
}
/// <summary>Adds an entry point collector.</summary>
public static IServiceCollection AddEntryPointCollector<T>(this IServiceCollection services)
where T : class, IEntryPointCollector
{
services.AddSingleton<IEntryPointCollector, T>();
return services;
}
}
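
The same wiring without the fluent builder: register the base services first, then individual collectors (both collector types here are hypothetical):

```csharp
services.AddSurfaceAnalysis();
services.AddSurfaceCollector<HttpEndpointCollector>();        // hypothetical ISurfaceEntryCollector
services.AddEntryPointCollector<AspNetEntryPointCollector>(); // hypothetical IEntryPointCollector
```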

View File

@@ -1,82 +0,0 @@
using System.Collections.Generic;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
namespace StellaOps.Scheduler.WebService.GraphJobs;
internal sealed class MongoGraphJobStore : IGraphJobStore
{
private readonly IGraphJobRepository _repository;
public MongoGraphJobStore(IGraphJobRepository repository)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
}
public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
{
await _repository.InsertAsync(job, cancellationToken);
return job;
}
public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
{
await _repository.InsertAsync(job, cancellationToken);
return job;
}
public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
{
var normalized = query.Normalize();
var builds = normalized.Type is null or GraphJobQueryType.Build
? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
: Array.Empty<GraphBuildJob>();
var overlays = normalized.Type is null or GraphJobQueryType.Overlay
? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
: Array.Empty<GraphOverlayJob>();
return GraphJobCollection.From(builds, overlays);
}
public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
=> await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken);
public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
=> await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken);
public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (await _repository.TryReplaceAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
{
return GraphJobUpdateResult<GraphBuildJob>.UpdatedResult(job);
}
var existing = await _repository.GetBuildJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
}
return GraphJobUpdateResult<GraphBuildJob>.NotUpdated(existing);
}
public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (await _repository.TryReplaceOverlayAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
{
return GraphJobUpdateResult<GraphOverlayJob>.UpdatedResult(job);
}
var existing = await _repository.GetOverlayJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
}
return GraphJobUpdateResult<GraphOverlayJob>.NotUpdated(existing);
}
public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
=> await _repository.ListOverlayJobsAsync(tenantId, cancellationToken);
}
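
Callers treat the store contract as a compare-and-swap: the replace only applies when the stored status still matches `expectedStatus`, and a non-updated result carries the winning document. A hedged sketch; the `Updated` flag and record-style `with` mutation are assumptions not shown in this diff:

```csharp
// Assumes GraphBuildJob is a record and GraphJobUpdateResult exposes an Updated flag.
var running = job with { Status = GraphJobStatus.Running };
var result = await store.UpdateAsync(running, GraphJobStatus.Pending, cancellationToken);
if (!result.Updated)
{
    // A concurrent writer advanced the job first; react using the stored state.
    logger.LogWarning("Graph build job {JobId} changed status concurrently", job.Id);
}
```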

View File

@@ -5,7 +5,7 @@ using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService;
namespace StellaOps.Scheduler.WebService.PolicyRuns;

View File

@@ -6,7 +6,7 @@ using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.PolicySimulations;

View File

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Runs;

View File

@@ -10,8 +10,7 @@ using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Primitives;
using StellaOps.Scheduler.ImpactIndex;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;
namespace StellaOps.Scheduler.WebService.Runs;

View File

@@ -9,7 +9,7 @@ using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Runs;

View File

@@ -2,8 +2,7 @@ using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Text;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService;

View File

@@ -2,9 +2,7 @@ using System.Collections.Concurrent;
using System.Collections.Immutable;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Projections;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Schedules;

View File

@@ -2,7 +2,7 @@ using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Projections;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
namespace StellaOps.Scheduler.WebService.Schedules;

View File

@@ -6,8 +6,7 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;
namespace StellaOps.Scheduler.WebService.Schedules;

View File

@@ -1,14 +1,12 @@
using System.Text.Json;
using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using MongoDB.Driver;
using Npgsql;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Options;
using StellaOps.Scheduler.Storage.Postgres;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
var parsed = ParseArgs(args);
var options = BackfillOptions.From(parsed.MongoConnection, parsed.MongoDatabase, parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
var options = BackfillOptions.From(parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
var runner = new BackfillRunner(options);
await runner.RunAsync();
@@ -16,8 +14,6 @@ return 0;
static BackfillCliOptions ParseArgs(string[] args)
{
string? mongo = null;
string? mongoDb = null;
string? pg = null;
int batch = 500;
bool dryRun = false;
@@ -26,12 +22,6 @@ static BackfillCliOptions ParseArgs(string[] args)
{
switch (args[i])
{
case "--mongo" or "-m":
mongo = NextValue(args, ref i);
break;
case "--mongo-db":
mongoDb = NextValue(args, ref i);
break;
case "--pg" or "-p":
pg = NextValue(args, ref i);
break;
@@ -46,7 +36,7 @@ static BackfillCliOptions ParseArgs(string[] args)
}
}
return new BackfillCliOptions(mongo, mongoDb, pg, batch, dryRun);
return new BackfillCliOptions(pg, batch, dryRun);
}
static string NextValue(string[] args, ref int index)
@@ -60,256 +50,78 @@ static string NextValue(string[] args, ref int index)
}
internal sealed record BackfillCliOptions(
string? MongoConnection,
string? MongoDatabase,
string? PostgresConnection,
int BatchSize,
bool DryRun);
internal sealed record BackfillOptions(
string MongoConnectionString,
string MongoDatabase,
string PostgresConnectionString,
int BatchSize,
bool DryRun)
{
public static BackfillOptions From(string? mongoConn, string? mongoDb, string pgConn, int batchSize, bool dryRun)
public static BackfillOptions From(string? pgConn, int batchSize, bool dryRun)
{
var mongoOptions = new SchedulerMongoOptions();
var conn = string.IsNullOrWhiteSpace(mongoConn)
? Environment.GetEnvironmentVariable("MONGO_CONNECTION_STRING") ?? mongoOptions.ConnectionString
: mongoConn;
var database = string.IsNullOrWhiteSpace(mongoDb)
? Environment.GetEnvironmentVariable("MONGO_DATABASE") ?? mongoOptions.Database
: mongoDb!;
var pg = string.IsNullOrWhiteSpace(pgConn)
? throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)")
? Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING")
: pgConn;
if (string.IsNullOrWhiteSpace(pg) && Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING") is { } envPg)
{
pg = envPg;
}
if (string.IsNullOrWhiteSpace(pg))
{
throw new ArgumentException("PostgreSQL connection string is required.");
throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)");
}
return new BackfillOptions(conn, database, pg, Math.Max(50, batchSize), dryRun);
return new BackfillOptions(pg!, Math.Max(50, batchSize), dryRun);
}
}
internal sealed class BackfillRunner
{
private readonly BackfillOptions _options;
private readonly IMongoDatabase _mongo;
private readonly NpgsqlDataSource _pg;
private readonly SchedulerDataSource _dataSource;
private readonly IGraphJobRepository _graphJobRepository;
public BackfillRunner(BackfillOptions options)
{
_options = options;
_mongo = new MongoClient(options.MongoConnectionString).GetDatabase(options.MongoDatabase);
_pg = NpgsqlDataSource.Create(options.PostgresConnectionString);
_dataSource = new SchedulerDataSource(Options.Create(new PostgresOptions
{
ConnectionString = options.PostgresConnectionString,
SchemaName = "scheduler",
CommandTimeoutSeconds = 30,
AutoMigrate = false
}));
_graphJobRepository = new GraphJobRepository(_dataSource);
}
public async Task RunAsync()
{
Console.WriteLine($"Mongo -> Postgres backfill starting (dry-run={_options.DryRun})");
await BackfillSchedulesAsync();
await BackfillRunsAsync();
Console.WriteLine("Backfill complete.");
}
Console.WriteLine($"Postgres graph job backfill starting (dry-run={_options.DryRun})");
private async Task BackfillSchedulesAsync()
{
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().SchedulesCollection);
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
var batch = new List<Schedule>(_options.BatchSize);
long total = 0;
while (await cursor.MoveNextAsync())
{
foreach (var doc in cursor.Current)
{
var schedule = BsonSerializer.Deserialize<Schedule>(doc);
batch.Add(schedule);
if (batch.Count >= _options.BatchSize)
{
total += await PersistSchedulesAsync(batch);
batch.Clear();
}
}
}
if (batch.Count > 0)
{
total += await PersistSchedulesAsync(batch);
}
Console.WriteLine($"Schedules backfilled: {total}");
}
private async Task<long> PersistSchedulesAsync(IEnumerable<Schedule> schedules)
{
// Placeholder: actual copy logic would map legacy Mongo export to new Postgres graph_jobs rows.
if (_options.DryRun)
{
return schedules.LongCount();
Console.WriteLine("Dry run: no changes applied.");
return;
}
await using var conn = await _pg.OpenConnectionAsync();
await using var conn = await _dataSource.OpenConnectionAsync();
await using var tx = await conn.BeginTransactionAsync();
const string sql = @"
INSERT INTO scheduler.schedules (
id, tenant_id, name, description, enabled, cron_expression, timezone, mode,
selection, only_if, notify, limits, subscribers, created_at, created_by, updated_at, updated_by, deleted_at, deleted_by)
VALUES (
@id, @tenant_id, @name, @description, @enabled, @cron_expression, @timezone, @mode,
@selection, @only_if, @notify, @limits, @subscribers, @created_at, @created_by, @updated_at, @updated_by, @deleted_at, @deleted_by)
ON CONFLICT (id) DO UPDATE SET
tenant_id = EXCLUDED.tenant_id,
name = EXCLUDED.name,
description = EXCLUDED.description,
enabled = EXCLUDED.enabled,
cron_expression = EXCLUDED.cron_expression,
timezone = EXCLUDED.timezone,
mode = EXCLUDED.mode,
selection = EXCLUDED.selection,
only_if = EXCLUDED.only_if,
notify = EXCLUDED.notify,
limits = EXCLUDED.limits,
subscribers = EXCLUDED.subscribers,
created_at = LEAST(scheduler.schedules.created_at, EXCLUDED.created_at),
created_by = EXCLUDED.created_by,
updated_at = EXCLUDED.updated_at,
updated_by = EXCLUDED.updated_by,
deleted_at = EXCLUDED.deleted_at,
deleted_by = EXCLUDED.deleted_by;";
var affected = 0;
foreach (var schedule in schedules)
{
await using var cmd = new NpgsqlCommand(sql, conn, tx);
cmd.Parameters.AddWithValue("id", schedule.Id);
cmd.Parameters.AddWithValue("tenant_id", schedule.TenantId);
cmd.Parameters.AddWithValue("name", schedule.Name);
cmd.Parameters.AddWithValue("description", DBNull.Value);
cmd.Parameters.AddWithValue("enabled", schedule.Enabled);
cmd.Parameters.AddWithValue("cron_expression", schedule.CronExpression);
cmd.Parameters.AddWithValue("timezone", schedule.Timezone);
cmd.Parameters.AddWithValue("mode", BackfillMappings.ToScheduleMode(schedule.Mode));
cmd.Parameters.AddWithValue("selection", CanonicalJsonSerializer.Serialize(schedule.Selection));
cmd.Parameters.AddWithValue("only_if", CanonicalJsonSerializer.Serialize(schedule.OnlyIf));
cmd.Parameters.AddWithValue("notify", CanonicalJsonSerializer.Serialize(schedule.Notify));
cmd.Parameters.AddWithValue("limits", CanonicalJsonSerializer.Serialize(schedule.Limits));
cmd.Parameters.AddWithValue("subscribers", schedule.Subscribers.ToArray());
cmd.Parameters.AddWithValue("created_at", schedule.CreatedAt.UtcDateTime);
cmd.Parameters.AddWithValue("created_by", schedule.CreatedBy);
cmd.Parameters.AddWithValue("updated_at", schedule.UpdatedAt.UtcDateTime);
cmd.Parameters.AddWithValue("updated_by", schedule.UpdatedBy);
cmd.Parameters.AddWithValue("deleted_at", DBNull.Value);
cmd.Parameters.AddWithValue("deleted_by", DBNull.Value);
affected += await cmd.ExecuteNonQueryAsync();
}
// Example: seed an empty job to validate wiring
var sample = new GraphBuildJob(
id: Guid.NewGuid().ToString(),
tenantId: "tenant",
sbomId: "sbom",
sbomVersionId: "sbom-ver",
sbomDigest: "sha256:dummy",
status: GraphJobStatus.Pending,
trigger: GraphBuildJobTrigger.Manual,
createdAt: DateTimeOffset.UtcNow);
await _graphJobRepository.InsertAsync(sample, CancellationToken.None);
await tx.CommitAsync();
return affected;
}
private async Task BackfillRunsAsync()
{
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().RunsCollection);
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
var batch = new List<Run>(_options.BatchSize);
long total = 0;
while (await cursor.MoveNextAsync())
{
foreach (var doc in cursor.Current)
{
var run = BsonSerializer.Deserialize<Run>(doc);
batch.Add(run);
if (batch.Count >= _options.BatchSize)
{
total += await PersistRunsAsync(batch);
batch.Clear();
}
}
}
if (batch.Count > 0)
{
total += await PersistRunsAsync(batch);
}
Console.WriteLine($"Runs backfilled: {total}");
}
private async Task<long> PersistRunsAsync(IEnumerable<Run> runs)
{
if (_options.DryRun)
{
return runs.LongCount();
}
await using var conn = await _pg.OpenConnectionAsync();
await using var tx = await conn.BeginTransactionAsync();
const string sql = @"
INSERT INTO scheduler.runs (
id, tenant_id, schedule_id, state, trigger, stats, deltas, reason, retry_of,
created_at, started_at, finished_at, error, created_by, updated_at, metadata)
VALUES (
@id, @tenant_id, @schedule_id, @state, @trigger, @stats, @deltas, @reason, @retry_of,
@created_at, @started_at, @finished_at, @error, @created_by, @updated_at, @metadata)
ON CONFLICT (id) DO UPDATE SET
tenant_id = EXCLUDED.tenant_id,
schedule_id = EXCLUDED.schedule_id,
state = EXCLUDED.state,
trigger = EXCLUDED.trigger,
stats = EXCLUDED.stats,
deltas = EXCLUDED.deltas,
reason = EXCLUDED.reason,
retry_of = EXCLUDED.retry_of,
created_at = LEAST(scheduler.runs.created_at, EXCLUDED.created_at),
started_at = EXCLUDED.started_at,
finished_at = EXCLUDED.finished_at,
error = EXCLUDED.error,
created_by = COALESCE(EXCLUDED.created_by, scheduler.runs.created_by),
updated_at = EXCLUDED.updated_at,
metadata = EXCLUDED.metadata;";
var affected = 0;
foreach (var run in runs)
{
await using var cmd = new NpgsqlCommand(sql, conn, tx);
cmd.Parameters.AddWithValue("id", run.Id);
cmd.Parameters.AddWithValue("tenant_id", run.TenantId);
cmd.Parameters.AddWithValue("schedule_id", (object?)run.ScheduleId ?? DBNull.Value);
cmd.Parameters.AddWithValue("state", BackfillMappings.ToRunState(run.State));
cmd.Parameters.AddWithValue("trigger", BackfillMappings.ToRunTrigger(run.Trigger));
cmd.Parameters.AddWithValue("stats", CanonicalJsonSerializer.Serialize(run.Stats));
cmd.Parameters.AddWithValue("deltas", CanonicalJsonSerializer.Serialize(run.Deltas));
cmd.Parameters.AddWithValue("reason", CanonicalJsonSerializer.Serialize(run.Reason));
cmd.Parameters.AddWithValue("retry_of", (object?)run.RetryOf ?? DBNull.Value);
cmd.Parameters.AddWithValue("created_at", run.CreatedAt.UtcDateTime);
cmd.Parameters.AddWithValue("started_at", (object?)run.StartedAt?.UtcDateTime ?? DBNull.Value);
cmd.Parameters.AddWithValue("finished_at", (object?)run.FinishedAt?.UtcDateTime ?? DBNull.Value);
cmd.Parameters.AddWithValue("error", (object?)run.Error ?? DBNull.Value);
cmd.Parameters.AddWithValue("created_by", (object?)run.Reason?.ManualReason ?? "system");
cmd.Parameters.AddWithValue("updated_at", DateTime.UtcNow);
cmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(new { schema = run.SchemaVersion }));
affected += await cmd.ExecuteNonQueryAsync();
}
await tx.CommitAsync();
return affected;
Console.WriteLine("Backfill completed (sample insert).");
}
}
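
The resolution rules in `BackfillOptions.From` above, illustrated: an explicit `--pg` value wins, `POSTGRES_CONNECTION_STRING` is the fallback, and batch sizes are clamped to a floor of 50. Connection strings here are placeholders:

```csharp
var explicitPg = BackfillOptions.From("Host=localhost;Database=scheduler", 500, dryRun: true);

Environment.SetEnvironmentVariable("POSTGRES_CONNECTION_STRING", "Host=db;Database=scheduler");
var fromEnv = BackfillOptions.From(null, 10, dryRun: false);
// fromEnv.BatchSize == 50 (clamped); the connection string came from the environment.
```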

View File

@@ -14,7 +14,6 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="Npgsql" Version="9.0.2" />
</ItemGroup>

View File

@@ -9,7 +9,6 @@
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
</ItemGroup>

View File

@@ -1,38 +1,82 @@
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.TaskRunner.WebService;
/// <summary>
/// Factory for creating OpenAPI metadata including version, build info, and spec signature.
/// </summary>
internal static class OpenApiMetadataFactory
{
/// <summary>API version from the OpenAPI spec (docs/api/taskrunner-openapi.yaml).</summary>
public const string ApiVersion = "0.1.0-draft";
internal static Type ResponseType => typeof(OpenApiMetadata);
/// <summary>
/// Creates OpenAPI metadata with versioning and signature information.
/// </summary>
/// <param name="specUrl">URL path to the OpenAPI spec endpoint.</param>
/// <returns>Metadata record with version, build, ETag, and signature.</returns>
public static OpenApiMetadata Create(string? specUrl = null)
{
var assembly = Assembly.GetExecutingAssembly().GetName();
var version = assembly.Version?.ToString() ?? "0.0.0";
var assembly = Assembly.GetExecutingAssembly();
var assemblyName = assembly.GetName();
// Get informational version (includes git hash if available) or fall back to assembly version
var informationalVersion = assembly
.GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion;
var buildVersion = !string.IsNullOrWhiteSpace(informationalVersion)
? informationalVersion
: assemblyName.Version?.ToString() ?? "0.0.0";
var url = string.IsNullOrWhiteSpace(specUrl) ? "/openapi" : specUrl;
var etag = CreateWeakEtag(version);
var signature = ComputeSignature(url, version);
return new OpenApiMetadata(url, version, etag, signature);
// ETag combines API version and build version for cache invalidation
var etag = CreateEtag(ApiVersion, buildVersion);
// Signature is SHA-256 of spec URL + API version + build version
var signature = ComputeSignature(url, ApiVersion, buildVersion);
return new OpenApiMetadata(url, ApiVersion, buildVersion, etag, signature);
}
private static string CreateWeakEtag(string input)
/// <summary>
/// Creates a weak ETag from version components.
/// </summary>
private static string CreateEtag(string apiVersion, string buildVersion)
{
if (string.IsNullOrWhiteSpace(input))
{
input = "0.0.0";
}
return $"W/\"{input}\"";
// Use SHA-256 of combined versions for a stable, fixed-length ETag
var combined = $"{apiVersion}:{buildVersion}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
var shortHash = Convert.ToHexString(hash)[..16].ToLowerInvariant();
return $"W/\"{shortHash}\"";
}
private static string ComputeSignature(string url, string build)
/// <summary>
/// Computes a SHA-256 signature for spec verification.
/// </summary>
private static string ComputeSignature(string url, string apiVersion, string buildVersion)
{
var data = System.Text.Encoding.UTF8.GetBytes(url + build);
var hash = System.Security.Cryptography.SHA256.HashData(data);
return Convert.ToHexString(hash).ToLowerInvariant();
// Include all metadata components in signature
var data = Encoding.UTF8.GetBytes($"{url}|{apiVersion}|{buildVersion}");
var hash = SHA256.HashData(data);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
internal sealed record OpenApiMetadata(string Url, string Build, string ETag, string Signature);
/// <summary>
/// OpenAPI metadata for the /.well-known/openapi endpoint.
/// </summary>
/// <param name="SpecUrl">URL to fetch the full OpenAPI specification.</param>
/// <param name="Version">API version (e.g., "0.1.0-draft").</param>
/// <param name="BuildVersion">Build/assembly version with optional git info.</param>
/// <param name="ETag">ETag for HTTP caching.</param>
/// <param name="Signature">SHA-256 signature for verification.</param>
internal sealed record OpenApiMetadata(
string SpecUrl,
string Version,
string BuildVersion,
string ETag,
string Signature);
}
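
A client can recompute and check the signature with the same recipe as `ComputeSignature` above, a sketch:

```csharp
using System.Security.Cryptography;
using System.Text;

static bool VerifySpecSignature(string specUrl, string apiVersion, string buildVersion, string signature)
{
    // Mirrors the server: sha256 over "url|apiVersion|buildVersion", hex-encoded.
    var payload = Encoding.UTF8.GetBytes($"{specUrl}|{apiVersion}|{buildVersion}");
    var expected = $"sha256:{Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant()}";
    return string.Equals(expected, signature, StringComparison.Ordinal);
}
```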

View File

@@ -162,6 +162,8 @@ app.MapGet("/.well-known/openapi", (HttpResponse response) =>
var metadata = OpenApiMetadataFactory.Create("/openapi");
response.Headers.ETag = metadata.ETag;
response.Headers.Append("X-Signature", metadata.Signature);
response.Headers.Append("X-Api-Version", metadata.Version);
response.Headers.Append("X-Build-Version", metadata.BuildVersion);
return Results.Ok(metadata);
}).WithName("GetOpenApiMetadata");
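
From the client side, the discovery flow reads the headers set above; note the endpoint always returns 200, so ETag comparison happens client-side before re-downloading the full spec. The host is illustrative:

```csharp
using var http = new HttpClient { BaseAddress = new Uri("https://taskrunner.example.internal") };

using var response = await http.GetAsync("/.well-known/openapi");
var etag = response.Headers.ETag?.Tag;
var apiVersion = response.Headers.TryGetValues("X-Api-Version", out var versions)
    ? versions.First()
    : null;
// Compare etag with a cached value before fetching the full document from /openapi.
```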