up
This commit is contained in:
@@ -9,15 +9,47 @@ Deliver offline bundle verification and ingestion tooling for sealed environment
|
||||
- CLI + API surfaces for dry-run verification, import, and status queries.
|
||||
- Integration hooks for Conseiller, Excitor, Policy Engine, and Export Center.
|
||||
- Negative-case handling (tampering, expired signatures, root rotation) with operator guidance.
|
||||
- **Monotonicity enforcement** for version rollback prevention (Sprint 0338).
|
||||
- **Quarantine service** for failed bundle forensic analysis (Sprint 0338).
|
||||
- **Evidence reconciliation** with VEX lattice precedence (Sprint 0342).
|
||||
|
||||
## Key Interfaces (per Advisory Implementation)
|
||||
|
||||
### Versioning (Sprint 0338)
|
||||
- `IVersionMonotonicityChecker` - Validates incoming versions are newer than active
|
||||
- `IBundleVersionStore` - Postgres-backed version tracking per tenant/type
|
||||
- `BundleVersion` - SemVer + timestamp model with `IsNewerThan()` comparison
|
||||
|
||||
### Quarantine (Sprint 0338)
|
||||
- `IQuarantineService` - Preserves failed bundles with diagnostics
|
||||
- `FileSystemQuarantineService` - Implementation with TTL cleanup
|
||||
- Structure: `/updates/quarantine/<timestamp>-<reason>/` with bundle, manifest, verification.log, failure-reason.txt
|
||||
|
||||
### Telemetry (Sprint 0341)
|
||||
- `OfflineKitMetrics` - Prometheus metrics (import counts, latencies)
|
||||
- `OfflineKitLogFields` - Standardized structured logging constants
|
||||
- `IOfflineKitAuditEmitter` - Audit event emission to Authority schema
|
||||
|
||||
### Reconciliation (Sprint 0342)
|
||||
- `IEvidenceReconciler` - Orchestrates 5-step algorithm per advisory §5
|
||||
- `ArtifactIndex` - Digest-keyed, deterministically ordered artifact store
|
||||
- `IEvidenceCollector` - Collects SBOMs, attestations, VEX from evidence directory
|
||||
- `PrecedenceLattice` - VEX merge with vendor > maintainer > 3rd-party precedence
|
||||
- `EvidenceGraphEmitter` - Deterministic graph output with DSSE signing
|
||||
|
||||
## Definition of Done
|
||||
- Deterministic fixtures for valid/invalid bundles committed.
|
||||
- Integration tests prove catalog + object-store updates are idempotent.
|
||||
- Import audit trail viewable via API and timeline events.
|
||||
- **Monotonicity check blocks rollback unless force-activated with reason.**
|
||||
- **Failed bundles are quarantined with full diagnostic context.**
|
||||
- **Evidence reconciliation produces identical output for identical input.**
|
||||
|
||||
## Required Reading
|
||||
- `docs/airgap/airgap-mode.md`
|
||||
- `docs/airgap/advisory-implementation-roadmap.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/product-advisories/14-Dec-2025 - Offline and Air-Gap Technical Reference.md`
|
||||
|
||||
## Working Agreement
|
||||
- 1. Update task status to `DOING`/`DONE` in both the corresponding sprint file `/docs/implplan/SPRINT_*.md` and the local `TASKS.md` when you start or finish work.
|
||||
|
||||
870
src/Api/StellaOps.Api.OpenApi/scanner/openapi.yaml
Normal file
870
src/Api/StellaOps.Api.OpenApi/scanner/openapi.yaml
Normal file
@@ -0,0 +1,870 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: StellaOps Scanner API
|
||||
version: 1.0.0
|
||||
description: |
|
||||
Scanner service APIs for call graph ingestion, reachability computation,
|
||||
and vulnerability finding queries. Supports CI/CD integration with
|
||||
idempotent submissions and async computation.
|
||||
|
||||
servers:
|
||||
- url: /api
|
||||
description: Scanner service endpoint
|
||||
|
||||
tags:
|
||||
- name: Scans
|
||||
description: Scan lifecycle management
|
||||
- name: CallGraphs
|
||||
description: Call graph ingestion
|
||||
- name: RuntimeEvidence
|
||||
description: Runtime evidence collection
|
||||
- name: Reachability
|
||||
description: Reachability analysis and queries
|
||||
- name: Exports
|
||||
description: Report exports
|
||||
- name: ProofSpines
|
||||
description: Verifiable audit trails
|
||||
|
||||
paths:
|
||||
/scans:
|
||||
post:
|
||||
tags: [Scans]
|
||||
operationId: createScan
|
||||
summary: Create a new scan
|
||||
description: |
|
||||
Initiates a new scan context. Returns a scanId for subsequent
|
||||
call graph and evidence submissions.
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CreateScanRequest'
|
||||
responses:
|
||||
'201':
|
||||
description: Scan created
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CreateScanResponse'
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
|
||||
/scans/{scanId}:
|
||||
get:
|
||||
tags: [Scans]
|
||||
operationId: getScan
|
||||
summary: Get scan status
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
responses:
|
||||
'200':
|
||||
description: Scan details
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ScanDetails'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
/scans/{scanId}/callgraphs:
|
||||
post:
|
||||
tags: [CallGraphs]
|
||||
operationId: submitCallGraph
|
||||
summary: Submit a call graph
|
||||
description: |
|
||||
Submits a language-specific call graph for reachability analysis.
|
||||
Idempotent: duplicate submissions with same Content-Digest are ignored.
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
- name: Content-Digest
|
||||
in: header
|
||||
required: true
|
||||
description: SHA-256 digest for idempotency (RFC 9530)
|
||||
schema:
|
||||
type: string
|
||||
example: sha-256=:X48E9qOokqqrvdts8nOJRJN3OWDUoyWxBf7kbu9DBPE=:
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CallGraphV1'
|
||||
application/x-ndjson:
|
||||
schema:
|
||||
type: string
|
||||
description: Streaming NDJSON for large graphs
|
||||
responses:
|
||||
'202':
|
||||
description: Call graph accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/CallGraphAcceptedResponse'
|
||||
'409':
|
||||
description: Duplicate submission (idempotent success)
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
'413':
|
||||
description: Call graph too large
|
||||
|
||||
/scans/{scanId}/runtimeevidence:
|
||||
post:
|
||||
tags: [RuntimeEvidence]
|
||||
operationId: submitRuntimeEvidence
|
||||
summary: Submit runtime evidence
|
||||
description: |
|
||||
Submits runtime execution evidence (stack traces, loaded modules).
|
||||
Merges with existing evidence for the scan.
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RuntimeEvidenceV1'
|
||||
responses:
|
||||
'202':
|
||||
description: Evidence accepted
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/RuntimeEvidenceAcceptedResponse'
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
|
||||
/scans/{scanId}/sbom:
|
||||
post:
|
||||
tags: [Scans]
|
||||
operationId: submitSbom
|
||||
summary: Submit SBOM for scan
|
||||
description: |
|
||||
Associates an SBOM (CycloneDX or SPDX) with the scan.
|
||||
Required before reachability computation.
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/vnd.cyclonedx+json:
|
||||
schema:
|
||||
type: object
|
||||
application/spdx+json:
|
||||
schema:
|
||||
type: object
|
||||
responses:
|
||||
'202':
|
||||
description: SBOM accepted
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
|
||||
/scans/{scanId}/compute-reachability:
|
||||
post:
|
||||
tags: [Reachability]
|
||||
operationId: computeReachability
|
||||
summary: Trigger reachability computation
|
||||
description: |
|
||||
Triggers reachability analysis for the scan. Idempotent.
|
||||
Computation is asynchronous; poll scan status for completion.
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
requestBody:
|
||||
required: false
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ComputeReachabilityRequest'
|
||||
responses:
|
||||
'202':
|
||||
description: Computation started
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ComputeReachabilityResponse'
|
||||
'409':
|
||||
description: Computation already in progress
|
||||
'400':
|
||||
$ref: '#/components/responses/BadRequest'
|
||||
|
||||
/scans/{scanId}/reachability/components:
|
||||
get:
|
||||
tags: [Reachability]
|
||||
operationId: getReachabilityByComponent
|
||||
summary: Get reachability status by component
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
- name: purl
|
||||
in: query
|
||||
description: Filter by Package URL
|
||||
schema:
|
||||
type: string
|
||||
- name: status
|
||||
in: query
|
||||
description: Filter by reachability status
|
||||
schema:
|
||||
type: string
|
||||
enum: [reachable, unreachable, possibly_reachable, unknown]
|
||||
responses:
|
||||
'200':
|
||||
description: Component reachability results
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ComponentReachabilityList'
|
||||
|
||||
/scans/{scanId}/reachability/findings:
|
||||
get:
|
||||
tags: [Reachability]
|
||||
operationId: getReachabilityFindings
|
||||
summary: Get vulnerability findings with reachability
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
- name: cve
|
||||
in: query
|
||||
description: Filter by CVE ID
|
||||
schema:
|
||||
type: string
|
||||
- name: status
|
||||
in: query
|
||||
description: Filter by reachability status
|
||||
schema:
|
||||
type: string
|
||||
enum: [reachable, unreachable, possibly_reachable, unknown]
|
||||
responses:
|
||||
'200':
|
||||
description: Vulnerability findings with reachability
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ReachabilityFindingList'
|
||||
|
||||
/scans/{scanId}/reachability/explain:
|
||||
get:
|
||||
tags: [Reachability]
|
||||
operationId: explainReachability
|
||||
summary: Explain reachability for CVE/component
|
||||
description: |
|
||||
Returns detailed explanation of why a CVE affects a component,
|
||||
including path witness, evidence chain, and contributing factors.
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
- name: cve
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
- name: purl
|
||||
in: query
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: Reachability explanation
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ReachabilityExplanation'
|
||||
'404':
|
||||
description: CVE/component combination not found
|
||||
|
||||
/scans/{scanId}/exports/sarif:
|
||||
get:
|
||||
tags: [Exports]
|
||||
operationId: exportSarif
|
||||
summary: Export findings as SARIF
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
responses:
|
||||
'200':
|
||||
description: SARIF report
|
||||
content:
|
||||
application/sarif+json:
|
||||
schema:
|
||||
type: object
|
||||
|
||||
/scans/{scanId}/exports/cdxr:
|
||||
get:
|
||||
tags: [Exports]
|
||||
operationId: exportCycloneDxReachability
|
||||
summary: Export as CycloneDX with reachability extension
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
responses:
|
||||
'200':
|
||||
description: CycloneDX with reachability
|
||||
content:
|
||||
application/vnd.cyclonedx+json:
|
||||
schema:
|
||||
type: object
|
||||
|
||||
/scans/{scanId}/exports/openvex:
|
||||
get:
|
||||
tags: [Exports]
|
||||
operationId: exportOpenVex
|
||||
summary: Export as OpenVEX
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
responses:
|
||||
'200':
|
||||
description: OpenVEX document
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: object
|
||||
|
||||
/scans/{scanId}/spines:
|
||||
get:
|
||||
tags: [ProofSpines]
|
||||
operationId: getSpinesByScan
|
||||
summary: List proof spines for a scan
|
||||
parameters:
|
||||
- $ref: '#/components/parameters/ScanIdPath'
|
||||
responses:
|
||||
'200':
|
||||
description: Proof spines for scan
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ProofSpineList'
|
||||
|
||||
/spines/{spineId}:
|
||||
get:
|
||||
tags: [ProofSpines]
|
||||
operationId: getSpine
|
||||
summary: Get a proof spine
|
||||
description: Returns full spine with all segments and verification status.
|
||||
parameters:
|
||||
- name: spineId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: Proof spine details
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ProofSpine'
|
||||
'404':
|
||||
$ref: '#/components/responses/NotFound'
|
||||
|
||||
components:
|
||||
parameters:
|
||||
ScanIdPath:
|
||||
name: scanId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
|
||||
responses:
|
||||
BadRequest:
|
||||
description: Invalid request
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorResponse'
|
||||
NotFound:
|
||||
description: Resource not found
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/ErrorResponse'
|
||||
|
||||
schemas:
|
||||
CreateScanRequest:
|
||||
type: object
|
||||
required: [artifactDigest]
|
||||
properties:
|
||||
artifactDigest:
|
||||
type: string
|
||||
description: Image or artifact digest (sha256:...)
|
||||
repoUri:
|
||||
type: string
|
||||
commitSha:
|
||||
type: string
|
||||
policyProfileId:
|
||||
type: string
|
||||
metadata:
|
||||
type: object
|
||||
additionalProperties: true
|
||||
|
||||
CreateScanResponse:
|
||||
type: object
|
||||
properties:
|
||||
scanId:
|
||||
type: string
|
||||
format: uuid
|
||||
status:
|
||||
type: string
|
||||
enum: [created, pending, processing, completed, failed]
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
ScanDetails:
|
||||
type: object
|
||||
properties:
|
||||
scanId:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
artifactDigest:
|
||||
type: string
|
||||
callGraphCount:
|
||||
type: integer
|
||||
runtimeEvidenceCount:
|
||||
type: integer
|
||||
reachabilityStatus:
|
||||
type: string
|
||||
enum: [pending, computing, completed, failed]
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
completedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
CallGraphV1:
|
||||
type: object
|
||||
required: [schema, scanKey, language, nodes, edges]
|
||||
properties:
|
||||
schema:
|
||||
type: string
|
||||
const: stella.callgraph.v1
|
||||
scanKey:
|
||||
type: string
|
||||
format: uuid
|
||||
language:
|
||||
type: string
|
||||
enum: [dotnet, java, node, python, go, rust, binary, ruby, php]
|
||||
artifacts:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/CallGraphArtifact'
|
||||
nodes:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/CallGraphNode'
|
||||
edges:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/CallGraphEdge'
|
||||
entrypoints:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/CallGraphEntrypoint'
|
||||
|
||||
CallGraphArtifact:
|
||||
type: object
|
||||
properties:
|
||||
artifactKey:
|
||||
type: string
|
||||
kind:
|
||||
type: string
|
||||
enum: [assembly, jar, module, binary]
|
||||
sha256:
|
||||
type: string
|
||||
purl:
|
||||
type: string
|
||||
|
||||
CallGraphNode:
|
||||
type: object
|
||||
required: [nodeId, symbolKey]
|
||||
properties:
|
||||
nodeId:
|
||||
type: string
|
||||
artifactKey:
|
||||
type: string
|
||||
symbolKey:
|
||||
type: string
|
||||
description: Canonical symbol key (Namespace.Type::Method(signature))
|
||||
visibility:
|
||||
type: string
|
||||
enum: [public, internal, private, unknown]
|
||||
isEntrypointCandidate:
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
CallGraphEdge:
|
||||
type: object
|
||||
required: [from, to]
|
||||
properties:
|
||||
from:
|
||||
type: string
|
||||
description: Source node ID
|
||||
to:
|
||||
type: string
|
||||
description: Target node ID
|
||||
kind:
|
||||
type: string
|
||||
enum: [static, heuristic]
|
||||
default: static
|
||||
reason:
|
||||
type: string
|
||||
enum: [direct_call, virtual_call, reflection_string, di_binding, dynamic_import, unknown]
|
||||
weight:
|
||||
type: number
|
||||
default: 1.0
|
||||
|
||||
CallGraphEntrypoint:
|
||||
type: object
|
||||
required: [nodeId, kind]
|
||||
properties:
|
||||
nodeId:
|
||||
type: string
|
||||
kind:
|
||||
type: string
|
||||
enum: [http, grpc, cli, job, event, unknown]
|
||||
route:
|
||||
type: string
|
||||
description: HTTP route pattern (e.g., /api/orders/{id})
|
||||
framework:
|
||||
type: string
|
||||
enum: [aspnetcore, minimalapi, spring, express, fastapi, unknown]
|
||||
|
||||
CallGraphAcceptedResponse:
|
||||
type: object
|
||||
properties:
|
||||
callgraphId:
|
||||
type: string
|
||||
nodeCount:
|
||||
type: integer
|
||||
edgeCount:
|
||||
type: integer
|
||||
digest:
|
||||
type: string
|
||||
|
||||
RuntimeEvidenceV1:
|
||||
type: object
|
||||
required: [schema, scanKey, collectedAt]
|
||||
properties:
|
||||
schema:
|
||||
type: string
|
||||
const: stella.runtimeevidence.v1
|
||||
scanKey:
|
||||
type: string
|
||||
format: uuid
|
||||
collectedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
environment:
|
||||
$ref: '#/components/schemas/RuntimeEnvironment'
|
||||
samples:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/RuntimeSample'
|
||||
loadedArtifacts:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/LoadedArtifact'
|
||||
|
||||
RuntimeEnvironment:
|
||||
type: object
|
||||
properties:
|
||||
os:
|
||||
type: string
|
||||
k8s:
|
||||
type: object
|
||||
properties:
|
||||
namespace:
|
||||
type: string
|
||||
pod:
|
||||
type: string
|
||||
container:
|
||||
type: string
|
||||
imageDigest:
|
||||
type: string
|
||||
buildId:
|
||||
type: string
|
||||
|
||||
RuntimeSample:
|
||||
type: object
|
||||
properties:
|
||||
timestamp:
|
||||
type: string
|
||||
format: date-time
|
||||
pid:
|
||||
type: integer
|
||||
threadId:
|
||||
type: integer
|
||||
frames:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: Array of node IDs representing call stack
|
||||
sampleWeight:
|
||||
type: number
|
||||
default: 1.0
|
||||
|
||||
LoadedArtifact:
|
||||
type: object
|
||||
properties:
|
||||
artifactKey:
|
||||
type: string
|
||||
evidence:
|
||||
type: string
|
||||
enum: [loaded_module, mapped_file, jar_loaded]
|
||||
|
||||
RuntimeEvidenceAcceptedResponse:
|
||||
type: object
|
||||
properties:
|
||||
evidenceId:
|
||||
type: string
|
||||
sampleCount:
|
||||
type: integer
|
||||
loadedArtifactCount:
|
||||
type: integer
|
||||
|
||||
ComputeReachabilityRequest:
|
||||
type: object
|
||||
properties:
|
||||
forceRecompute:
|
||||
type: boolean
|
||||
default: false
|
||||
entrypoints:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: Override auto-detected entrypoints
|
||||
targets:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: Specific symbols to analyze
|
||||
|
||||
ComputeReachabilityResponse:
|
||||
type: object
|
||||
properties:
|
||||
jobId:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum: [queued, processing]
|
||||
estimatedDuration:
|
||||
type: string
|
||||
description: ISO-8601 duration estimate
|
||||
|
||||
ComponentReachabilityList:
|
||||
type: object
|
||||
properties:
|
||||
items:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ComponentReachability'
|
||||
total:
|
||||
type: integer
|
||||
|
||||
ComponentReachability:
|
||||
type: object
|
||||
properties:
|
||||
purl:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum: [reachable, unreachable, possibly_reachable, unknown]
|
||||
confidence:
|
||||
type: number
|
||||
latticeState:
|
||||
type: string
|
||||
why:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
|
||||
ReachabilityFindingList:
|
||||
type: object
|
||||
properties:
|
||||
items:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ReachabilityFinding'
|
||||
total:
|
||||
type: integer
|
||||
|
||||
ReachabilityFinding:
|
||||
type: object
|
||||
properties:
|
||||
cveId:
|
||||
type: string
|
||||
purl:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
confidence:
|
||||
type: number
|
||||
latticeState:
|
||||
type: string
|
||||
severity:
|
||||
type: string
|
||||
affectedVersions:
|
||||
type: string
|
||||
|
||||
ReachabilityExplanation:
|
||||
type: object
|
||||
properties:
|
||||
cveId:
|
||||
type: string
|
||||
purl:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
confidence:
|
||||
type: number
|
||||
latticeState:
|
||||
type: string
|
||||
pathWitness:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: Symbol path from entrypoint to vulnerable code
|
||||
why:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ExplanationReason'
|
||||
evidence:
|
||||
$ref: '#/components/schemas/EvidenceChain'
|
||||
spineId:
|
||||
type: string
|
||||
description: Reference to ProofSpine for full audit trail
|
||||
|
||||
ExplanationReason:
|
||||
type: object
|
||||
properties:
|
||||
code:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
impact:
|
||||
type: number
|
||||
|
||||
EvidenceChain:
|
||||
type: object
|
||||
properties:
|
||||
staticAnalysis:
|
||||
type: object
|
||||
properties:
|
||||
callgraphDigest:
|
||||
type: string
|
||||
pathLength:
|
||||
type: integer
|
||||
edgeTypes:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
runtimeEvidence:
|
||||
type: object
|
||||
properties:
|
||||
observed:
|
||||
type: boolean
|
||||
hitCount:
|
||||
type: integer
|
||||
lastObserved:
|
||||
type: string
|
||||
format: date-time
|
||||
policyEvaluation:
|
||||
type: object
|
||||
properties:
|
||||
policyDigest:
|
||||
type: string
|
||||
verdict:
|
||||
type: string
|
||||
verdictReason:
|
||||
type: string
|
||||
|
||||
ProofSpineList:
|
||||
type: object
|
||||
properties:
|
||||
items:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ProofSpineSummary'
|
||||
total:
|
||||
type: integer
|
||||
|
||||
ProofSpineSummary:
|
||||
type: object
|
||||
properties:
|
||||
spineId:
|
||||
type: string
|
||||
artifactId:
|
||||
type: string
|
||||
vulnerabilityId:
|
||||
type: string
|
||||
verdict:
|
||||
type: string
|
||||
segmentCount:
|
||||
type: integer
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
ProofSpine:
|
||||
type: object
|
||||
properties:
|
||||
spineId:
|
||||
type: string
|
||||
artifactId:
|
||||
type: string
|
||||
vulnerabilityId:
|
||||
type: string
|
||||
policyProfileId:
|
||||
type: string
|
||||
verdict:
|
||||
type: string
|
||||
verdictReason:
|
||||
type: string
|
||||
rootHash:
|
||||
type: string
|
||||
scanRunId:
|
||||
type: string
|
||||
segments:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/ProofSegment'
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
supersededBySpineId:
|
||||
type: string
|
||||
|
||||
ProofSegment:
|
||||
type: object
|
||||
properties:
|
||||
segmentId:
|
||||
type: string
|
||||
segmentType:
|
||||
type: string
|
||||
enum: [SBOM_SLICE, MATCH, REACHABILITY, GUARD_ANALYSIS, RUNTIME_OBSERVATION, POLICY_EVAL]
|
||||
index:
|
||||
type: integer
|
||||
inputHash:
|
||||
type: string
|
||||
resultHash:
|
||||
type: string
|
||||
prevSegmentHash:
|
||||
type: string
|
||||
toolId:
|
||||
type: string
|
||||
toolVersion:
|
||||
type: string
|
||||
status:
|
||||
type: string
|
||||
enum: [pending, verified, partial, invalid, untrusted]
|
||||
createdAt:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
ErrorResponse:
|
||||
type: object
|
||||
properties:
|
||||
error:
|
||||
type: string
|
||||
message:
|
||||
type: string
|
||||
details:
|
||||
type: object
|
||||
@@ -40,8 +40,101 @@ Deliver the API, workers, and storage that power signing, verification, and life
|
||||
|
||||
## Required Reading
|
||||
- `docs/modules/attestor/architecture.md`
|
||||
- `docs/modules/attestor/rekor-verification-design.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
|
||||
---
|
||||
|
||||
## Active Sprints — Rekor Verification Enhancement
|
||||
|
||||
### SPRINT_3000_0001_0001: Merkle Proof Verification (P0)
|
||||
|
||||
**Objective**: Implement cryptographic verification of Rekor inclusion proofs for offline/air-gap attestation validation.
|
||||
|
||||
**Key Contracts**:
|
||||
|
||||
```csharp
|
||||
// IRekorClient.cs — New method
|
||||
Task<RekorInclusionVerificationResult> VerifyInclusionAsync(
|
||||
AttestorEntry entry,
|
||||
byte[] payloadDigest,
|
||||
byte[] rekorPublicKey,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
// MerkleProofVerifier.cs — RFC 6962 implementation
|
||||
public static bool VerifyInclusion(
|
||||
byte[] leafHash,
|
||||
long leafIndex,
|
||||
long treeSize,
|
||||
IReadOnlyList<byte[]> proofHashes,
|
||||
byte[] expectedRootHash);
|
||||
```
|
||||
|
||||
**New Files**:
|
||||
- `StellaOps.Attestor.Core/Rekor/RekorInclusionVerificationResult.cs`
|
||||
- `StellaOps.Attestor.Core/Verification/MerkleProofVerifier.cs`
|
||||
- `StellaOps.Attestor.Core/Verification/CheckpointVerifier.cs`
|
||||
|
||||
### SPRINT_3000_0001_0002: Rekor Retry Queue & Metrics (P1)
|
||||
|
||||
**Objective**: Implement durable retry queue for failed Rekor submissions with operational metrics.
|
||||
|
||||
**Key Contracts**:
|
||||
|
||||
```csharp
|
||||
// IRekorSubmissionQueue.cs
|
||||
public interface IRekorSubmissionQueue
|
||||
{
|
||||
Task<Guid> EnqueueAsync(string tenantId, string bundleSha256, byte[] dssePayload, string backend, CancellationToken ct);
|
||||
Task<IReadOnlyList<RekorQueueItem>> DequeueAsync(int batchSize, CancellationToken ct);
|
||||
Task MarkSubmittedAsync(Guid id, string rekorUuid, long? logIndex, CancellationToken ct);
|
||||
Task MarkRetryAsync(Guid id, string error, CancellationToken ct);
|
||||
Task MarkDeadLetterAsync(Guid id, string error, CancellationToken ct);
|
||||
Task<QueueDepthSnapshot> GetQueueDepthAsync(CancellationToken ct);
|
||||
}
|
||||
```
|
||||
|
||||
**New Metrics**:
|
||||
- `attestor.rekor_queue_depth` (gauge)
|
||||
- `attestor.rekor_retry_attempts_total` (counter)
|
||||
- `attestor.rekor_submission_status_total` (counter)
|
||||
|
||||
**New Files**:
|
||||
- `StellaOps.Attestor.Core/Queue/IRekorSubmissionQueue.cs`
|
||||
- `StellaOps.Attestor.Infrastructure/Queue/PostgresRekorSubmissionQueue.cs`
|
||||
- `StellaOps.Attestor.Infrastructure/Workers/RekorRetryWorker.cs`
|
||||
- `Migrations/00X_rekor_submission_queue.sql`
|
||||
|
||||
### SPRINT_3000_0001_0003: Time Skew Validation (P2)
|
||||
|
||||
**Objective**: Validate Rekor `integrated_time` to detect backdated or anomalous entries.
|
||||
|
||||
**Key Contracts**:
|
||||
|
||||
```csharp
|
||||
// ITimeSkewValidator.cs
|
||||
public interface ITimeSkewValidator
|
||||
{
|
||||
TimeSkewResult Validate(DateTimeOffset integratedTime, DateTimeOffset localTime);
|
||||
}
|
||||
|
||||
public sealed record TimeSkewResult(
|
||||
TimeSkewSeverity Severity, // Ok, Warning, Rejected
|
||||
TimeSpan Skew,
|
||||
string? Message);
|
||||
```
|
||||
|
||||
**Configuration** (`AttestorOptions.TimeSkewOptions`):
|
||||
- `WarnThresholdSeconds`: 300 (5 min)
|
||||
- `RejectThresholdSeconds`: 3600 (1 hour)
|
||||
- `FutureToleranceSeconds`: 60
|
||||
|
||||
**New Files**:
|
||||
- `StellaOps.Attestor.Core/Validation/ITimeSkewValidator.cs`
|
||||
- `StellaOps.Attestor.Infrastructure/Validation/TimeSkewValidator.cs`
|
||||
|
||||
---
|
||||
|
||||
## Working Agreement
|
||||
- 1. Update task status to `DOING`/`DONE` in both the corresponding sprint file `/docs/implplan/SPRINT_*.md` and the local `TASKS.md` when you start or finish work.
|
||||
- 2. Review this charter and the Required Reading documents before coding; confirm prerequisites are met.
|
||||
|
||||
57
src/Attestor/StellaOps.Attestor/TASKS.md
Normal file
57
src/Attestor/StellaOps.Attestor/TASKS.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# Attestor · Sprint 3000-0001-0001 (Rekor Merkle Proof Verification)
|
||||
|
||||
| Task ID | Status | Notes | Updated (UTC) |
|
||||
| --- | --- | --- | --- |
|
||||
| SPRINT_3000_0001_0001-T1 | DOING | Add `VerifyInclusionAsync` contract + wire initial verifier plumbing. | 2025-12-14 |
|
||||
| SPRINT_3000_0001_0001-T2 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T3 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T4 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T5 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T6 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T7 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T8 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T9 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T10 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T11 | TODO | | |
|
||||
| SPRINT_3000_0001_0001-T12 | TODO | | |
|
||||
|
||||
# Attestor · Sprint 3000-0001-0002 (Rekor Durable Retry Queue & Metrics)
|
||||
|
||||
| Task ID | Status | Notes | Updated (UTC) |
|
||||
| --- | --- | --- | --- |
|
||||
| SPRINT_3000_0001_0002-T1 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T2 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T3 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T4 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T5 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T6 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T7 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T8 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T9 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T10 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T11 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T12 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T13 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T14 | TODO | | |
|
||||
| SPRINT_3000_0001_0002-T15 | TODO | | |
|
||||
|
||||
# Attestor · Sprint 3000-0001-0003 (Rekor Integrated Time Skew Validation)
|
||||
|
||||
| Task ID | Status | Notes | Updated (UTC) |
|
||||
| --- | --- | --- | --- |
|
||||
| SPRINT_3000_0001_0003-T1 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T2 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T3 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T4 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T5 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T6 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T7 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T8 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T9 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T10 | TODO | | |
|
||||
| SPRINT_3000_0001_0003-T11 | TODO | | |
|
||||
|
||||
Status changes must be mirrored in:
|
||||
- `docs/implplan/SPRINT_3000_0001_0001_rekor_merkle_proof_verification.md`
|
||||
- `docs/implplan/SPRINT_3000_0001_0002_rekor_retry_queue_metrics.md`
|
||||
- `docs/implplan/SPRINT_3000_0001_0003_rekor_time_skew_validation.md`
|
||||
@@ -0,0 +1,23 @@
|
||||
# StellaOps.Attestor.ProofChain — Local Agent Charter
|
||||
|
||||
## Scope
|
||||
- This charter applies to `src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/**`.
|
||||
|
||||
## Primary roles
|
||||
- Backend engineer (C# / .NET 10).
|
||||
- QA automation engineer (xUnit).
|
||||
|
||||
## Required reading (treat as read before edits)
|
||||
- `docs/modules/attestor/architecture.md`
|
||||
- `docs/product-advisories/14-Dec-2025 - Proof and Evidence Chain Technical Reference.md`
|
||||
- RFC 8785 (JSON Canonicalization Scheme)
|
||||
|
||||
## Working agreements
|
||||
- Determinism is mandatory: stable ordering, stable hashes, UTC timestamps only.
|
||||
- No network dependence in library code paths; keep implementations offline-friendly.
|
||||
- Prefer small, composable services with explicit interfaces (`I*`).
|
||||
|
||||
## Testing expectations
|
||||
- Every behavior change must be covered by tests under `src/Attestor/__Tests/StellaOps.Attestor.ProofChain.Tests`.
|
||||
- Include determinism tests (same inputs -> same IDs/hashes) and negative tests (invalid formats).
|
||||
|
||||
@@ -0,0 +1,155 @@
|
||||
using System;
|
||||
using StellaOps.Attestor.ProofChain.Internal;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>
/// Base type for content-addressed identifiers of the form <c>&lt;algorithm&gt;:&lt;digest&gt;</c>
/// (e.g. <c>sha256:&lt;64 lowercase hex&gt;</c>). Algorithm and digest are normalized on
/// construction so equal IDs compare equal regardless of input casing or stray whitespace.
/// </summary>
public abstract record ContentAddressedId
{
    /// <param name="algorithm">Digest algorithm name; trimmed and lower-cased.</param>
    /// <param name="digest">Hex digest; validated and lower-cased for the given algorithm.</param>
    protected ContentAddressedId(string algorithm, string digest)
    {
        Algorithm = NormalizeAlgorithm(algorithm, nameof(algorithm));
        Digest = NormalizeDigest(Algorithm, digest);
    }

    /// <summary>Normalized (lowercase) algorithm name, e.g. "sha256".</summary>
    public string Algorithm { get; }

    /// <summary>Normalized lowercase hex digest.</summary>
    public string Digest { get; }

    /// <summary>Canonical textual form: "&lt;algorithm&gt;:&lt;digest&gt;".</summary>
    public override string ToString() => $"{Algorithm}:{Digest}";

    /// <summary>Parses "&lt;algorithm&gt;:&lt;digest&gt;" into a generic ID.</summary>
    /// <exception cref="FormatException">The value is not a valid content-addressed ID.</exception>
    public static GenericContentAddressedId Parse(string value)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(value);

        if (!TrySplit(value, out var algorithm, out var digest))
        {
            throw new FormatException($"Invalid content-addressed ID format: '{value}'.");
        }

        return new GenericContentAddressedId(algorithm, digest);
    }

    /// <summary>
    /// Splits and normalizes "&lt;algorithm&gt;:&lt;digest&gt;" without throwing. Returns false
    /// on a missing/edge separator, empty parts, unsupported algorithm, or malformed digest.
    /// </summary>
    internal static bool TrySplit(string value, out string algorithm, out string digest)
    {
        algorithm = string.Empty;
        digest = string.Empty;

        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        // First ':' separates algorithm from digest; it must not sit at either end.
        var separator = value.IndexOf(':', StringComparison.Ordinal);
        if (separator <= 0 || separator == value.Length - 1)
        {
            return false;
        }

        algorithm = value[..separator];
        digest = value[(separator + 1)..];
        if (string.IsNullOrWhiteSpace(algorithm) || string.IsNullOrWhiteSpace(digest))
        {
            return false;
        }

        try
        {
            algorithm = NormalizeAlgorithm(algorithm, nameof(algorithm));
            digest = NormalizeDigest(algorithm, digest);
            return true;
        }
        catch (FormatException)
        {
            // NormalizeDigest throws for unsupported algorithms or bad hex; map that to false here.
            return false;
        }
    }

    /// <summary>Trims and lower-cases the algorithm name.</summary>
    internal static string NormalizeAlgorithm(string algorithm, string parameterName)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(algorithm, parameterName);
        return algorithm.Trim().ToLowerInvariant();
    }

    /// <summary>
    /// Validates the digest as hex of the exact length required by <paramref name="algorithm"/>
    /// and returns it lower-cased. Only sha256 (64 chars) and sha512 (128 chars) are supported.
    /// </summary>
    /// <exception cref="FormatException">Unsupported algorithm or malformed hex digest.</exception>
    internal static string NormalizeDigest(string algorithm, string digest)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(digest, nameof(digest));
        digest = digest.Trim();

        return algorithm switch
        {
            "sha256" => Hex.NormalizeLowerHex(digest, expectedLength: 64, nameof(digest)),
            "sha512" => Hex.NormalizeLowerHex(digest, expectedLength: 128, nameof(digest)),
            _ => throw new FormatException($"Unsupported digest algorithm '{algorithm}'.")
        };
    }
}
|
||||
|
||||
/// <summary>Algorithm-agnostic content-addressed ID produced by <see cref="ContentAddressedId.Parse"/>.</summary>
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest);
|
||||
|
||||
/// <summary>sha256-based content-addressed identifier of a build artifact.</summary>
public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Parses "sha256:&lt;64 hex&gt;".</summary>
    /// <exception cref="FormatException">Any other shape or algorithm.</exception>
    public new static ArtifactId Parse(string value)
    {
        if (TryParse(value, out var parsed))
        {
            return parsed!;
        }

        throw new FormatException($"Invalid ArtifactID: '{value}'.");
    }

    /// <summary>Non-throwing parse; only the "sha256" algorithm is accepted.</summary>
    public static bool TryParse(string value, out ArtifactId? id)
    {
        id = null;

        if (!ContentAddressedId.TrySplit(value, out var algorithm, out var digest) ||
            !string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            return false;
        }

        id = new ArtifactId(digest);
        return true;
    }
}
|
||||
|
||||
/// <summary>sha256 ID of a canonicalized evidence predicate.</summary>
public sealed record EvidenceId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Parses "sha256:&lt;64 hex&gt;"; failures name the kind "EvidenceID".</summary>
    public new static EvidenceId Parse(string value) => new(Sha256IdParser.Parse(value, "EvidenceID"));
}
|
||||
|
||||
/// <summary>sha256 ID of a canonicalized reasoning predicate.</summary>
public sealed record ReasoningId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Parses "sha256:&lt;64 hex&gt;"; failures name the kind "ReasoningID".</summary>
    public new static ReasoningId Parse(string value) => new(Sha256IdParser.Parse(value, "ReasoningID"));
}
|
||||
|
||||
/// <summary>sha256 ID of a canonicalized VEX verdict predicate.</summary>
public sealed record VexVerdictId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Parses "sha256:&lt;64 hex&gt;"; failures name the kind "VEXVerdictID".</summary>
    public new static VexVerdictId Parse(string value) => new(Sha256IdParser.Parse(value, "VEXVerdictID"));
}
|
||||
|
||||
/// <summary>sha256 Merkle-root ID of a proof bundle (SBOM entry + evidence + reasoning + verdict).</summary>
public sealed record ProofBundleId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Parses "sha256:&lt;64 hex&gt;"; failures name the kind "ProofBundleID".</summary>
    public new static ProofBundleId Parse(string value) => new(Sha256IdParser.Parse(value, "ProofBundleID"));
}
|
||||
|
||||
/// <summary>Shared parsing helper for the sha256-only ID record types.</summary>
internal static class Sha256IdParser
{
    /// <summary>
    /// Extracts the digest from "sha256:&lt;64 hex&gt;", throwing a <see cref="FormatException"/>
    /// that names the ID <paramref name="kind"/> (e.g. "EvidenceID") when the value is malformed.
    /// </summary>
    public static string Parse(string value, string kind)
    {
        var valid = ContentAddressedId.TrySplit(value, out var algorithm, out var digest)
            && string.Equals(algorithm, "sha256", StringComparison.Ordinal);

        return valid
            ? digest
            : throw new FormatException($"Invalid {kind}: '{value}'.");
    }
}
|
||||
@@ -0,0 +1,154 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
using StellaOps.Attestor.ProofChain.Merkle;
|
||||
using StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>
/// Computes the content-addressed IDs used across the proof chain: predicate IDs are
/// sha256 over RFC 8785-canonicalized JSON, composite IDs are Merkle roots over the
/// deterministically ordered member IDs.
/// </summary>
public sealed class ContentAddressedIdGenerator : IContentAddressedIdGenerator
{
    // Self-referential ID fields are nulled before hashing and must not be serialized.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = null,
        WriteIndented = false
    };

    private readonly IJsonCanonicalizer _canonicalizer;
    private readonly IMerkleTreeBuilder _merkleTreeBuilder;

    public ContentAddressedIdGenerator(IJsonCanonicalizer canonicalizer, IMerkleTreeBuilder merkleTreeBuilder)
    {
        _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
        _merkleTreeBuilder = merkleTreeBuilder ?? throw new ArgumentNullException(nameof(merkleTreeBuilder));
    }

    /// <summary>sha256 of the canonical predicate with its own EvidenceId field cleared.</summary>
    public EvidenceId ComputeEvidenceId(EvidencePredicate predicate)
    {
        ArgumentNullException.ThrowIfNull(predicate);
        return new EvidenceId(HashSha256Hex(Canonicalize(predicate with { EvidenceId = null })));
    }

    /// <summary>sha256 of the canonical predicate with its own ReasoningId field cleared.</summary>
    public ReasoningId ComputeReasoningId(ReasoningPredicate predicate)
    {
        ArgumentNullException.ThrowIfNull(predicate);
        return new ReasoningId(HashSha256Hex(Canonicalize(predicate with { ReasoningId = null })));
    }

    /// <summary>sha256 of the canonical predicate with its own VexVerdictId field cleared.</summary>
    public VexVerdictId ComputeVexVerdictId(VexPredicate predicate)
    {
        ArgumentNullException.ThrowIfNull(predicate);
        return new VexVerdictId(HashSha256Hex(Canonicalize(predicate with { VexVerdictId = null })));
    }

    /// <summary>
    /// Merkle root over: SBOM entry ID, then the evidence IDs sorted ordinally
    /// (input order is irrelevant), then reasoning ID, then verdict ID.
    /// </summary>
    /// <exception cref="ArgumentException">No evidence IDs supplied.</exception>
    public ProofBundleId ComputeProofBundleId(
        SbomEntryId sbomEntryId,
        IReadOnlyList<EvidenceId> evidenceIds,
        ReasoningId reasoningId,
        VexVerdictId vexVerdictId)
    {
        ArgumentNullException.ThrowIfNull(sbomEntryId);
        ArgumentNullException.ThrowIfNull(evidenceIds);
        ArgumentNullException.ThrowIfNull(reasoningId);
        ArgumentNullException.ThrowIfNull(vexVerdictId);

        if (evidenceIds.Count == 0)
        {
            throw new ArgumentException("At least one EvidenceID is required.", nameof(evidenceIds));
        }

        // Sort evidence textual forms so the bundle ID is independent of caller ordering.
        var evidence = new List<string>(evidenceIds.Count);
        foreach (var evidenceId in evidenceIds)
        {
            evidence.Add(evidenceId.ToString());
        }

        evidence.Sort(StringComparer.Ordinal);

        var leaves = new List<ReadOnlyMemory<byte>>(evidence.Count + 3) { Utf8(sbomEntryId.ToString()) };
        foreach (var item in evidence)
        {
            leaves.Add(Utf8(item));
        }

        leaves.Add(Utf8(reasoningId.ToString()));
        leaves.Add(Utf8(vexVerdictId.ToString()));

        return new ProofBundleId(Convert.ToHexStringLower(_merkleTreeBuilder.ComputeMerkleRoot(leaves)));
    }

    /// <summary>
    /// Merkle root over sorted node IDs, sorted edge IDs, then the four trimmed digests
    /// (policy, feeds, toolchain, params) in that fixed order.
    /// </summary>
    public GraphRevisionId ComputeGraphRevisionId(
        IReadOnlyList<string> nodeIds,
        IReadOnlyList<string> edgeIds,
        string policyDigest,
        string feedsDigest,
        string toolchainDigest,
        string paramsDigest)
    {
        ArgumentNullException.ThrowIfNull(nodeIds);
        ArgumentNullException.ThrowIfNull(edgeIds);
        ArgumentException.ThrowIfNullOrWhiteSpace(policyDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(feedsDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(toolchainDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(paramsDigest);

        var sortedNodes = new List<string>(nodeIds);
        sortedNodes.Sort(StringComparer.Ordinal);

        var sortedEdges = new List<string>(edgeIds);
        sortedEdges.Sort(StringComparer.Ordinal);

        var leaves = new List<ReadOnlyMemory<byte>>(sortedNodes.Count + sortedEdges.Count + 4);
        foreach (var nodeId in sortedNodes)
        {
            leaves.Add(Utf8(nodeId));
        }

        foreach (var edgeId in sortedEdges)
        {
            leaves.Add(Utf8(edgeId));
        }

        leaves.Add(Utf8(policyDigest.Trim()));
        leaves.Add(Utf8(feedsDigest.Trim()));
        leaves.Add(Utf8(toolchainDigest.Trim()));
        leaves.Add(Utf8(paramsDigest.Trim()));

        return new GraphRevisionId(Convert.ToHexStringLower(_merkleTreeBuilder.ComputeMerkleRoot(leaves)));
    }

    /// <summary>"sha256:&lt;hex&gt;" digest of the RFC 8785-canonicalized SBOM bytes.</summary>
    public string ComputeSbomDigest(ReadOnlySpan<byte> sbomJson)
        => $"sha256:{HashSha256Hex(_canonicalizer.Canonicalize(sbomJson))}";

    /// <summary>Builds an entry ID from the SBOM digest plus the component PURL/version.</summary>
    public SbomEntryId ComputeSbomEntryId(ReadOnlySpan<byte> sbomJson, string purl, string? version = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(purl);
        return new SbomEntryId(ComputeSbomDigest(sbomJson), purl, version);
    }

    // Serialize with the fixed options, then canonicalize per RFC 8785.
    private byte[] Canonicalize<T>(T value)
        => _canonicalizer.Canonicalize(JsonSerializer.SerializeToUtf8Bytes(value, SerializerOptions));

    private static string HashSha256Hex(ReadOnlySpan<byte> bytes)
        => Convert.ToHexStringLower(SHA256.HashData(bytes));

    private static ReadOnlyMemory<byte> Utf8(string value) => Encoding.UTF8.GetBytes(value);
}
|
||||
@@ -0,0 +1,31 @@
|
||||
using System;
|
||||
using StellaOps.Attestor.ProofChain.Internal;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>Identifier of an evidence-graph revision; textual form "grv_sha256:&lt;64 hex&gt;".</summary>
public readonly record struct GraphRevisionId
{
    private const string Prefix = "grv_sha256:";

    /// <summary>Wraps a 64-character hex digest, normalizing it to lowercase.</summary>
    public GraphRevisionId(string digest)
    {
        Digest = Hex.NormalizeLowerHex(digest, expectedLength: 64, nameof(digest));
    }

    /// <summary>Normalized lowercase hex digest (no prefix).</summary>
    public string Digest { get; }

    public override string ToString() => $"{Prefix}{Digest}";

    /// <summary>Parses "grv_sha256:&lt;64 hex&gt;".</summary>
    /// <exception cref="FormatException">Missing prefix or malformed digest.</exception>
    public static GraphRevisionId Parse(string value)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(value);

        return value.StartsWith(Prefix, StringComparison.Ordinal)
            ? new GraphRevisionId(value[Prefix.Length..])
            : throw new FormatException($"Invalid GraphRevisionID: '{value}'.");
    }
}
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
using System.Collections.Generic;
|
||||
using System;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>
/// Computes deterministic content-addressed IDs for proof-chain predicates and composites.
/// See <see cref="ContentAddressedIdGenerator"/> for the canonical implementation.
/// </summary>
public interface IContentAddressedIdGenerator
{
    /// <summary>ID of the canonicalized evidence predicate (its own ID field excluded).</summary>
    EvidenceId ComputeEvidenceId(EvidencePredicate predicate);
    /// <summary>ID of the canonicalized reasoning predicate (its own ID field excluded).</summary>
    ReasoningId ComputeReasoningId(ReasoningPredicate predicate);
    /// <summary>ID of the canonicalized VEX predicate (its own ID field excluded).</summary>
    VexVerdictId ComputeVexVerdictId(VexPredicate predicate);

    /// <summary>Merkle-root ID over the SBOM entry, sorted evidence IDs, reasoning and verdict IDs.</summary>
    ProofBundleId ComputeProofBundleId(
        SbomEntryId sbomEntryId,
        IReadOnlyList<EvidenceId> evidenceIds,
        ReasoningId reasoningId,
        VexVerdictId vexVerdictId);

    /// <summary>Merkle-root ID over sorted graph nodes/edges and the four environment digests.</summary>
    GraphRevisionId ComputeGraphRevisionId(
        IReadOnlyList<string> nodeIds,
        IReadOnlyList<string> edgeIds,
        string policyDigest,
        string feedsDigest,
        string toolchainDigest,
        string paramsDigest);

    /// <summary>"sha256:&lt;hex&gt;" digest of the canonicalized SBOM bytes.</summary>
    string ComputeSbomDigest(ReadOnlySpan<byte> sbomJson);

    /// <summary>Entry ID combining the SBOM digest with a component PURL and optional version.</summary>
    SbomEntryId ComputeSbomEntryId(ReadOnlySpan<byte> sbomJson, string purl, string? version = null);
}
|
||||
@@ -0,0 +1,83 @@
|
||||
using System;
|
||||
using StellaOps.Attestor.ProofChain.Internal;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>
/// Identifies one component entry within a specific SBOM document.
/// Textual form: "sha256:&lt;sbom digest&gt;:&lt;purl&gt;[@&lt;version&gt;]".
/// </summary>
public sealed record SbomEntryId
{
    /// <param name="sbomDigest">"sha256:&lt;64 hex&gt;" digest of the canonical SBOM.</param>
    /// <param name="purl">Package URL of the component; stored trimmed.</param>
    /// <param name="version">Optional version; blank values are stored as null.</param>
    public SbomEntryId(string sbomDigest, string purl, string? version = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sbomDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(purl);

        SbomDigest = NormalizeSbomDigest(sbomDigest);
        Purl = purl.Trim();
        Version = string.IsNullOrWhiteSpace(version) ? null : version.Trim();
    }

    /// <summary>Normalized "sha256:&lt;64 lowercase hex&gt;" digest of the owning SBOM.</summary>
    public string SbomDigest { get; }

    /// <summary>Component package URL (may itself embed a version after '@').</summary>
    public string Purl { get; }

    /// <summary>Separate component version, or null when absent.</summary>
    public string? Version { get; }

    public override string ToString()
    {
        var purl = Version is null ? Purl : $"{Purl}@{Version}";
        return $"{SbomDigest}:{purl}";
    }

    /// <summary>
    /// Parses "&lt;sbomDigest&gt;:&lt;purl&gt;[@&lt;version&gt;]" where the digest part is
    /// "sha256:&lt;64 hex&gt;" (algorithm matched case-insensitively, normalized to lowercase).
    /// </summary>
    /// <exception cref="FormatException">Malformed value or unsupported digest algorithm.</exception>
    public static SbomEntryId Parse(string value)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(value);

        // <sbomDigest>:<purl>[@<version>]
        // where <sbomDigest> is expected to be sha256:<64-hex>
        var firstColon = value.IndexOf(':', StringComparison.Ordinal);
        if (firstColon <= 0)
        {
            throw new FormatException($"Invalid SBOMEntryID: '{value}'.");
        }

        // Second ':' ends the digest; something must follow it (the PURL).
        var secondColon = value.IndexOf(':', firstColon + 1);
        if (secondColon <= 0 || secondColon >= value.Length - 1)
        {
            throw new FormatException($"Invalid SBOMEntryID: '{value}'.");
        }

        var algorithm = value[..firstColon];
        if (!string.Equals(algorithm, "sha256", StringComparison.OrdinalIgnoreCase))
        {
            throw new FormatException($"Invalid SBOMEntryID digest algorithm: '{algorithm}'.");
        }

        var digest = value[(firstColon + 1)..secondColon];
        digest = Hex.NormalizeLowerHex(digest, expectedLength: 64, nameof(digest));

        var rest = value[(secondColon + 1)..];
        if (string.IsNullOrWhiteSpace(rest))
        {
            throw new FormatException($"Invalid SBOMEntryID: '{value}'.");
        }

        // Heuristic: split version from PURL only when the '@' is not followed by "sha256:" (OCI digests).
        var at = rest.LastIndexOf('@');
        if (at > 0 && at < rest.Length - 1 && !rest[(at + 1)..].StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            return new SbomEntryId($"sha256:{digest}", rest[..at], rest[(at + 1)..]);
        }

        return new SbomEntryId($"sha256:{digest}", rest);
    }

    // Validates "sha256:<64 hex>" and returns the normalized lowercase form.
    private static string NormalizeSbomDigest(string value)
    {
        if (!ContentAddressedId.TrySplit(value, out var algorithm, out var digest) ||
            !string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            throw new FormatException($"Invalid SBOM digest: '{value}'.");
        }

        return $"sha256:{Hex.NormalizeLowerHex(digest, expectedLength: 64, nameof(value))}";
    }
}
|
||||
@@ -0,0 +1,15 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
/// <summary>GUID-backed identifier of a trust anchor.</summary>
public readonly record struct TrustAnchorId(Guid Value)
{
    /// <summary>Default "D" GUID formatting (lowercase, hyphenated).</summary>
    public override string ToString() => Value.ToString();

    /// <summary>Parses any GUID representation accepted by <see cref="Guid.Parse(string)"/>.</summary>
    public static TrustAnchorId Parse(string value)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(value);

        var parsed = Guid.Parse(value);
        return new TrustAnchorId(parsed);
    }
}
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Internal;
|
||||
|
||||
/// <summary>Hex-digest validation helpers shared by the identifier types.</summary>
internal static class Hex
{
    /// <summary>
    /// Validates that <paramref name="value"/> (after trimming) is exactly
    /// <paramref name="expectedLength"/> hex characters and returns it lower-cased.
    /// </summary>
    /// <exception cref="FormatException">Wrong length or a non-hex character.</exception>
    public static string NormalizeLowerHex(string value, int expectedLength, string parameterName)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(value, parameterName);

        var trimmed = value.Trim();

        if (trimmed.Length != expectedLength)
        {
            throw new FormatException($"Expected {expectedLength} hex characters but got {trimmed.Length}.");
        }

        for (var index = 0; index < trimmed.Length; index++)
        {
            // char.IsAsciiHexDigit matches exactly 0-9, a-f, A-F.
            if (!char.IsAsciiHexDigit(trimmed[index]))
            {
                throw new FormatException($"Invalid hex character '{trimmed[index]}' at position {index}.");
            }
        }

        return trimmed.ToLowerInvariant();
    }
}
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Json;
|
||||
|
||||
/// <summary>
/// Transforms UTF-8 JSON into a canonical byte form suitable for deterministic hashing
/// (see <see cref="Rfc8785JsonCanonicalizer"/> for the RFC 8785 implementation).
/// </summary>
public interface IJsonCanonicalizer
{
    /// <summary>Returns the canonical UTF-8 serialization of <paramref name="utf8Json"/>.</summary>
    byte[] Canonicalize(ReadOnlySpan<byte> utf8Json);
}
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Text.Encodings.Web;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Json;
|
||||
|
||||
/// <summary>
|
||||
/// Implements RFC 8785 JSON Canonicalization Scheme (JCS) for stable hashing.
|
||||
/// </summary>
|
||||
/// <summary>
/// Implements RFC 8785 JSON Canonicalization Scheme (JCS) for stable hashing:
/// object properties are ordinally sorted, whitespace is removed, and numbers are
/// re-serialized in a normalized form.
/// </summary>
public sealed class Rfc8785JsonCanonicalizer : IJsonCanonicalizer
{
    // No indentation; relaxed escaping keeps output minimal.
    // NOTE(review): JCS mandates ECMAScript-style string escaping; confirm that
    // UnsafeRelaxedJsonEscaping's escape set and \uXXXX casing match RFC 8785 §3.2.2.2.
    private static readonly JsonWriterOptions CanonicalWriterOptions = new()
    {
        Indented = false,
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
    };

    /// <summary>Parses the UTF-8 JSON input and returns its canonical UTF-8 byte form.</summary>
    public byte[] Canonicalize(ReadOnlySpan<byte> utf8Json)
    {
        var reader = new Utf8JsonReader(utf8Json, isFinalBlock: true, state: default);
        using var document = JsonDocument.ParseValue(ref reader);
        return Canonicalize(document.RootElement);
    }

    // Re-serializes a parsed element through the canonical writer into a fresh byte array.
    private static byte[] Canonicalize(JsonElement element)
    {
        var buffer = new ArrayBufferWriter<byte>();
        using (var writer = new Utf8JsonWriter(buffer, CanonicalWriterOptions))
        {
            WriteCanonical(writer, element);
        }

        return buffer.WrittenSpan.ToArray();
    }

    // Dispatches on token kind; composites recurse back through here.
    private static void WriteCanonical(Utf8JsonWriter writer, JsonElement element)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                WriteObject(writer, element);
                return;
            case JsonValueKind.Array:
                WriteArray(writer, element);
                return;
            case JsonValueKind.String:
                writer.WriteStringValue(element.GetString());
                return;
            case JsonValueKind.Number:
                WriteNumber(writer, element);
                return;
            case JsonValueKind.True:
                writer.WriteBooleanValue(true);
                return;
            case JsonValueKind.False:
                writer.WriteBooleanValue(false);
                return;
            case JsonValueKind.Null:
                writer.WriteNullValue();
                return;
            default:
                // JsonValueKind.Undefined (and anything new) is rejected outright.
                throw new FormatException($"Unsupported JSON token kind '{element.ValueKind}'.");
        }
    }

    // Properties are sorted by ordinal name comparison, which operates on UTF-16
    // code units — the ordering RFC 8785 §3.2.3 specifies.
    private static void WriteObject(Utf8JsonWriter writer, JsonElement element)
    {
        var properties = new List<(string Name, JsonElement Value)>();
        foreach (var property in element.EnumerateObject())
        {
            properties.Add((property.Name, property.Value));
        }

        properties.Sort(static (x, y) => string.CompareOrdinal(x.Name, y.Name));

        writer.WriteStartObject();
        foreach (var (name, value) in properties)
        {
            writer.WritePropertyName(name);
            WriteCanonical(writer, value);
        }
        writer.WriteEndObject();
    }

    // Arrays keep their original element order.
    private static void WriteArray(Utf8JsonWriter writer, JsonElement element)
    {
        writer.WriteStartArray();
        foreach (var item in element.EnumerateArray())
        {
            WriteCanonical(writer, item);
        }
        writer.WriteEndArray();
    }

    // Numbers are round-tripped through double and re-formatted with "R".
    // NOTE(review): this deviates from RFC 8785 in at least two ways —
    //  (1) integers beyond 2^53 lose precision when parsed into a double;
    //  (2) .NET's "R" format switches to exponent notation at a different threshold
    //      than ECMAScript's Number-to-string algorithm, and NormalizeExponent strips
    //      the '+' sign while ES emits e.g. "1e+21".
    // Confirm against the spec before relying on cross-implementation hash equality
    // for large or high-precision values.
    private static void WriteNumber(Utf8JsonWriter writer, JsonElement element)
    {
        var raw = element.GetRawText();
        if (!double.TryParse(raw, NumberStyles.Float, CultureInfo.InvariantCulture, out var value) ||
            double.IsNaN(value) ||
            double.IsInfinity(value))
        {
            throw new FormatException($"Invalid JSON number: '{raw}'.");
        }

        // Both +0 and -0 canonicalize to "0" (== compares them equal).
        if (value == 0d)
        {
            writer.WriteRawValue("0", skipInputValidation: true);
            return;
        }

        var formatted = value.ToString("R", CultureInfo.InvariantCulture);
        writer.WriteRawValue(NormalizeExponent(formatted), skipInputValidation: true);
    }

    // Rewrites "E"/"e" exponents: lowercases the marker, strips a '+' sign and leading
    // zeros; an all-zero exponent collapses to the mantissa alone (1E0 -> 1).
    private static string NormalizeExponent(string formatted)
    {
        var e = formatted.IndexOfAny(['E', 'e']);
        if (e < 0)
        {
            return formatted;
        }

        var mantissa = formatted[..e];
        var exponent = formatted[(e + 1)..];

        if (string.IsNullOrWhiteSpace(exponent))
        {
            return mantissa;
        }

        var sign = string.Empty;
        if (exponent[0] is '+' or '-')
        {
            sign = exponent[0] == '-' ? "-" : string.Empty;
            exponent = exponent[1..];
        }

        exponent = exponent.TrimStart('0');
        if (exponent.Length == 0)
        {
            // 1e0 -> 1
            return mantissa;
        }

        return $"{mantissa}e{sign}{exponent}";
    }
}
|
||||
@@ -0,0 +1,68 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Merkle;
|
||||
|
||||
/// <summary>
/// Computes a deterministic Merkle root: each leaf is SHA-256 hashed, the hash list is
/// padded to the next power of two by repeating the last leaf hash, then adjacent pairs
/// are hashed upward until a single root remains.
/// </summary>
public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
{
    /// <summary>Computes the Merkle root over <paramref name="leafValues"/> in the given order.</summary>
    /// <exception cref="ArgumentException">The leaf list is empty.</exception>
    public byte[] ComputeMerkleRoot(IReadOnlyList<ReadOnlyMemory<byte>> leafValues)
    {
        ArgumentNullException.ThrowIfNull(leafValues);

        if (leafValues.Count == 0)
        {
            throw new ArgumentException("At least one leaf is required.", nameof(leafValues));
        }

        // Fix: carry the padded size in an explicit local instead of reading List<T>.Capacity.
        // Capacity is an implementation detail, not a contract; relying on it to stay exactly
        // at the constructor-requested value is fragile.
        var paddedCount = PadToPowerOfTwo(leafValues.Count);

        var hashes = new List<byte[]>(paddedCount);
        for (var i = 0; i < leafValues.Count; i++)
        {
            hashes.Add(SHA256.HashData(leafValues[i].Span));
        }

        // Pad with duplicates of the last leaf hash (deterministic).
        // NOTE(review): leaf and interior hashes use no domain separation (e.g. 0x00/0x01
        // prefixes), so leaf/node second-preimage ambiguity is theoretically possible;
        // changing this would break all previously computed IDs, so it is flagged only.
        while (hashes.Count < paddedCount)
        {
            hashes.Add(hashes[^1]);
        }

        return ComputeRootFromLeafHashes(hashes);
    }

    // Repeatedly hashes adjacent pairs until one hash (the root) remains.
    private static byte[] ComputeRootFromLeafHashes(List<byte[]> hashes)
    {
        while (hashes.Count > 1)
        {
            var next = new List<byte[]>(hashes.Count / 2);
            for (var i = 0; i < hashes.Count; i += 2)
            {
                next.Add(HashInternal(hashes[i], hashes[i + 1]));
            }

            hashes = next;
        }

        return hashes[0];
    }

    // Smallest power of two >= count (returns 1 for count <= 1).
    private static int PadToPowerOfTwo(int count)
    {
        var power = 1;
        while (power < count)
        {
            power <<= 1;
        }
        return power;
    }

    // SHA-256 over the concatenation left || right.
    private static byte[] HashInternal(byte[] left, byte[] right)
    {
        var buffer = new byte[left.Length + right.Length];
        Buffer.BlockCopy(left, 0, buffer, 0, left.Length);
        Buffer.BlockCopy(right, 0, buffer, left.Length, right.Length);
        return SHA256.HashData(buffer);
    }
}
|
||||
|
||||
@@ -0,0 +1,10 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Merkle;
|
||||
|
||||
/// <summary>
/// Computes a Merkle root over an ordered list of leaf byte values
/// (see <see cref="DeterministicMerkleTreeBuilder"/>).
/// </summary>
public interface IMerkleTreeBuilder
{
    /// <summary>Root hash over the leaves in the given order; at least one leaf is required.</summary>
    byte[] ComputeMerkleRoot(IReadOnlyList<ReadOnlyMemory<byte>> leafValues);
}
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
using System;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
/// <summary>
/// One piece of collected evidence (a raw finding) tied to an SBOM entry. The record is
/// canonicalized (RFC 8785) with <see cref="EvidenceId"/> nulled to derive its content ID
/// (see ContentAddressedIdGenerator.ComputeEvidenceId).
/// </summary>
public sealed record EvidencePredicate
{
    /// <summary>Identifier of the evidence source.</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Version of the evidence source.</summary>
    [JsonPropertyName("sourceVersion")]
    public required string SourceVersion { get; init; }

    /// <summary>When the evidence was collected.</summary>
    [JsonPropertyName("collectionTime")]
    public required DateTimeOffset CollectionTime { get; init; }

    /// <summary>SBOM entry this evidence applies to.</summary>
    [JsonPropertyName("sbomEntryId")]
    public required string SbomEntryId { get; init; }

    /// <summary>Optional vulnerability identifier associated with the finding.</summary>
    [JsonPropertyName("vulnerabilityId")]
    public string? VulnerabilityId { get; init; }

    /// <summary>Source-specific raw finding payload.</summary>
    [JsonPropertyName("rawFinding")]
    public required object RawFinding { get; init; }

    /// <summary>Self-referential content ID; null (and omitted from JSON) while the ID is computed.</summary>
    [JsonPropertyName("evidenceId")]
    public string? EvidenceId { get; init; }
}
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
/// <summary>
/// Links an SBOM entry to its evidence, reasoning, and VEX verdict IDs plus the resulting
/// proof bundle ID — the spine connecting the proof-chain statements.
/// </summary>
public sealed record ProofSpinePredicate
{
    /// <summary>SBOM entry the spine describes.</summary>
    [JsonPropertyName("sbomEntryId")]
    public required string SbomEntryId { get; init; }

    /// <summary>Evidence IDs included in the bundle.</summary>
    [JsonPropertyName("evidenceIds")]
    public required IReadOnlyList<string> EvidenceIds { get; init; }

    /// <summary>Reasoning-predicate ID.</summary>
    [JsonPropertyName("reasoningId")]
    public required string ReasoningId { get; init; }

    /// <summary>VEX verdict ID.</summary>
    [JsonPropertyName("vexVerdictId")]
    public required string VexVerdictId { get; init; }

    /// <summary>Policy version the verdict was evaluated under.</summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>Merkle-root ID of the resulting proof bundle.</summary>
    [JsonPropertyName("proofBundleId")]
    public required string ProofBundleId { get; init; }
}
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
/// <summary>
/// Captures the inputs and intermediate state of a policy evaluation over an SBOM entry's
/// evidence. Canonicalized (RFC 8785) with <see cref="ReasoningId"/> nulled to derive its
/// content ID (see ContentAddressedIdGenerator.ComputeReasoningId).
/// </summary>
public sealed record ReasoningPredicate
{
    /// <summary>SBOM entry the reasoning applies to.</summary>
    [JsonPropertyName("sbomEntryId")]
    public required string SbomEntryId { get; init; }

    /// <summary>Evidence IDs considered by the evaluation.</summary>
    [JsonPropertyName("evidenceIds")]
    public required IReadOnlyList<string> EvidenceIds { get; init; }

    /// <summary>Policy version used for the evaluation.</summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>Named evaluation inputs.</summary>
    [JsonPropertyName("inputs")]
    public required IReadOnlyDictionary<string, object> Inputs { get; init; }

    /// <summary>Optional intermediate findings produced during evaluation.</summary>
    [JsonPropertyName("intermediateFindings")]
    public IReadOnlyDictionary<string, object>? IntermediateFindings { get; init; }

    /// <summary>Self-referential content ID; null (and omitted from JSON) while the ID is computed.</summary>
    [JsonPropertyName("reasoningId")]
    public string? ReasoningId { get; init; }
}
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
/// <summary>
/// VEX verdict for one vulnerability against one SBOM entry. Canonicalized (RFC 8785)
/// with <see cref="VexVerdictId"/> nulled to derive its content ID
/// (see ContentAddressedIdGenerator.ComputeVexVerdictId).
/// </summary>
public sealed record VexPredicate
{
    /// <summary>SBOM entry the verdict applies to.</summary>
    [JsonPropertyName("sbomEntryId")]
    public required string SbomEntryId { get; init; }

    /// <summary>Vulnerability being assessed.</summary>
    [JsonPropertyName("vulnerabilityId")]
    public required string VulnerabilityId { get; init; }

    /// <summary>VEX status value.</summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>Justification supporting the status.</summary>
    [JsonPropertyName("justification")]
    public required string Justification { get; init; }

    /// <summary>Policy version the verdict was evaluated under.</summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>ID of the reasoning predicate that produced this verdict.</summary>
    [JsonPropertyName("reasoningId")]
    public required string ReasoningId { get; init; }

    /// <summary>Self-referential content ID; null (and omitted from JSON) while the ID is computed.</summary>
    [JsonPropertyName("vexVerdictId")]
    public string? VexVerdictId { get; init; }
}
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Sbom;
|
||||
|
||||
/// <summary>
/// Derives the SBOM digest and per-component SBOM entry IDs from a CycloneDX JSON document.
/// </summary>
public sealed class CycloneDxSubjectExtractor : ISbomSubjectExtractor
{
    private readonly IContentAddressedIdGenerator _idGenerator;

    public CycloneDxSubjectExtractor(IContentAddressedIdGenerator idGenerator)
    {
        _idGenerator = idGenerator ?? throw new ArgumentNullException(nameof(idGenerator));
    }

    /// <summary>Computes the canonical SBOM digest and the sorted, de-duplicated entry IDs.</summary>
    public SbomSubjectExtractionResult ExtractCycloneDx(ReadOnlySpan<byte> sbomJson)
    {
        var digest = _idGenerator.ComputeSbomDigest(sbomJson);
        return new SbomSubjectExtractionResult(digest, ExtractEntryIds(digest, sbomJson));
    }

    // Walks the "components" array, building one entry ID per component that has a PURL.
    private static IReadOnlyList<SbomEntryId> ExtractEntryIds(string sbomDigest, ReadOnlySpan<byte> sbomJson)
    {
        var reader = new Utf8JsonReader(sbomJson, isFinalBlock: true, state: default);
        using var document = JsonDocument.ParseValue(ref reader);

        var root = document.RootElement;
        if (root.ValueKind != JsonValueKind.Object ||
            !root.TryGetProperty("components", out var components) ||
            components.ValueKind != JsonValueKind.Array)
        {
            // Not a JSON object, or no components array: nothing to extract.
            return Array.Empty<SbomEntryId>();
        }

        var seen = new HashSet<string>(StringComparer.Ordinal);
        var results = new List<SbomEntryId>();

        foreach (var component in components.EnumerateArray())
        {
            if (component.ValueKind != JsonValueKind.Object)
            {
                continue;
            }

            var purl = component.TryGetProperty("purl", out var purlElement) &&
                       purlElement.ValueKind == JsonValueKind.String
                ? purlElement.GetString()
                : null;
            if (string.IsNullOrWhiteSpace(purl))
            {
                continue;
            }

            // Attach the component's version only when the PURL does not already embed one.
            string? version = null;
            if (!purl.Contains('@', StringComparison.Ordinal) &&
                component.TryGetProperty("version", out var versionElement) &&
                versionElement.ValueKind == JsonValueKind.String)
            {
                version = versionElement.GetString();
            }

            var entryId = new SbomEntryId(sbomDigest, purl, version);
            if (seen.Add(entryId.ToString()))
            {
                results.Add(entryId);
            }
        }

        // Ordinal sort by textual form for deterministic output.
        results.Sort(static (a, b) => string.CompareOrdinal(a.ToString(), b.ToString()));
        return results;
    }
}
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Sbom;
|
||||
|
||||
public interface ISbomSubjectExtractor
|
||||
{
|
||||
SbomSubjectExtractionResult ExtractCycloneDx(ReadOnlySpan<byte> sbomJson);
|
||||
}
|
||||
|
||||
public sealed record SbomSubjectExtractionResult(string SbomDigest, IReadOnlyList<SbomEntryId> EntryIds);
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,29 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
<UseConcelierTestInfra>false</UseConcelierTestInfra>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Using Include="Xunit" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include=\"..\\..\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj\" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,10 @@
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
public class UnitTest1
|
||||
{
|
||||
[Fact]
|
||||
public void Test1()
|
||||
{
|
||||
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Authority.Plugin.Ldap.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Authority.Plugin.Standard.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Authority.Plugins.Abstractions.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,163 @@
|
||||
-- Authority Schema Migration 003: Row-Level Security
|
||||
-- Sprint: SPRINT_3421_0001_0001 - RLS Expansion
|
||||
-- Category: B (release migration, requires coordination)
|
||||
--
|
||||
-- Purpose: Enable Row-Level Security on all tenant-scoped tables in the authority
|
||||
-- schema to provide database-level tenant isolation as defense-in-depth.
|
||||
--
|
||||
-- Note: The tenants table is EXCLUDED from RLS as it defines tenant boundaries.
|
||||
-- All other tables reference tenants(tenant_id) and are RLS-protected.
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 1: Create helper schema and function for tenant context
|
||||
-- ============================================================================
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS authority_app;
|
||||
|
||||
-- Tenant context helper function
|
||||
CREATE OR REPLACE FUNCTION authority_app.require_current_tenant()
|
||||
RETURNS TEXT
|
||||
LANGUAGE plpgsql STABLE SECURITY DEFINER
|
||||
AS $$
|
||||
DECLARE
|
||||
v_tenant TEXT;
|
||||
BEGIN
|
||||
v_tenant := current_setting('app.tenant_id', true);
|
||||
IF v_tenant IS NULL OR v_tenant = '' THEN
|
||||
RAISE EXCEPTION 'app.tenant_id session variable not set'
|
||||
USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
|
||||
ERRCODE = 'P0001';
|
||||
END IF;
|
||||
RETURN v_tenant;
|
||||
END;
|
||||
$$;
|
||||
|
||||
REVOKE ALL ON FUNCTION authority_app.require_current_tenant() FROM PUBLIC;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 2: Enable RLS on tenant-scoped tables (NOT on tenants table itself)
|
||||
-- ============================================================================
|
||||
|
||||
-- authority.users
|
||||
ALTER TABLE authority.users ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.users FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS users_tenant_isolation ON authority.users;
|
||||
CREATE POLICY users_tenant_isolation ON authority.users
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.roles
|
||||
ALTER TABLE authority.roles ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.roles FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS roles_tenant_isolation ON authority.roles;
|
||||
CREATE POLICY roles_tenant_isolation ON authority.roles
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.permissions
|
||||
ALTER TABLE authority.permissions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.permissions FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS permissions_tenant_isolation ON authority.permissions;
|
||||
CREATE POLICY permissions_tenant_isolation ON authority.permissions
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.role_permissions (FK-based, inherits from roles)
|
||||
ALTER TABLE authority.role_permissions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.role_permissions FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS role_permissions_tenant_isolation ON authority.role_permissions;
|
||||
CREATE POLICY role_permissions_tenant_isolation ON authority.role_permissions
|
||||
FOR ALL
|
||||
USING (
|
||||
role_id IN (
|
||||
SELECT id FROM authority.roles
|
||||
WHERE tenant_id = authority_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- authority.user_roles (FK-based, inherits from users)
|
||||
ALTER TABLE authority.user_roles ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.user_roles FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS user_roles_tenant_isolation ON authority.user_roles;
|
||||
CREATE POLICY user_roles_tenant_isolation ON authority.user_roles
|
||||
FOR ALL
|
||||
USING (
|
||||
user_id IN (
|
||||
SELECT id FROM authority.users
|
||||
WHERE tenant_id = authority_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- authority.api_keys
|
||||
ALTER TABLE authority.api_keys ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.api_keys FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS api_keys_tenant_isolation ON authority.api_keys;
|
||||
CREATE POLICY api_keys_tenant_isolation ON authority.api_keys
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.tokens
|
||||
ALTER TABLE authority.tokens ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.tokens FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS tokens_tenant_isolation ON authority.tokens;
|
||||
CREATE POLICY tokens_tenant_isolation ON authority.tokens
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.refresh_tokens
|
||||
ALTER TABLE authority.refresh_tokens ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.refresh_tokens FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS refresh_tokens_tenant_isolation ON authority.refresh_tokens;
|
||||
CREATE POLICY refresh_tokens_tenant_isolation ON authority.refresh_tokens
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.sessions
|
||||
ALTER TABLE authority.sessions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.sessions FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS sessions_tenant_isolation ON authority.sessions;
|
||||
CREATE POLICY sessions_tenant_isolation ON authority.sessions
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- authority.audit
|
||||
ALTER TABLE authority.audit ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE authority.audit FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS audit_tenant_isolation ON authority.audit;
|
||||
CREATE POLICY audit_tenant_isolation ON authority.audit
|
||||
FOR ALL
|
||||
USING (tenant_id = authority_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = authority_app.require_current_tenant());
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 3: Note on tenants table
|
||||
-- ============================================================================
|
||||
|
||||
-- The authority.tenants table is intentionally NOT RLS-protected.
|
||||
-- It defines tenant boundaries and must be accessible for tenant resolution.
|
||||
-- Access control is handled at the application layer.
|
||||
COMMENT ON TABLE authority.tenants IS
|
||||
'Tenant registry. Not RLS-protected - defines tenant boundaries for the system.';
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 4: Create admin bypass role
|
||||
-- ============================================================================
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'authority_admin') THEN
|
||||
CREATE ROLE authority_admin WITH NOLOGIN BYPASSRLS;
|
||||
END IF;
|
||||
END
|
||||
$$;
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Cli.Plugins.NonCore.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("FixtureUpdater")]
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Acsc.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Cccs.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.CertBund.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.CertCc.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.CertFr.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.CertIn.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Common.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Cve.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Debian.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.RedHat.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Suse.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Ubuntu.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("FixtureUpdater")]
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Ghsa.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Ics.Cisa.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Ics.Kaspersky.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Jvn.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Kev.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Kisa.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,4 +1,6 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Nvd.Tests")]
|
||||
[assembly: InternalsVisibleTo("FixtureUpdater")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("FixtureUpdater")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Ru.Bdu.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Ru.Nkcki.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.StellaOpsMirror.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Adobe.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Apple.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Chromium.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Cisco.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Msrc.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Oracle.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
using StellaOps.Plugin.Versioning;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Vndr.Vmware.Tests")]
|
||||
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
-- Vuln Schema Migration 006: Partition Merge Events Table
|
||||
-- Sprint: SPRINT_3422_0001_0001 - Time-Based Partitioning
|
||||
-- Category: C (infrastructure change, requires maintenance window)
|
||||
--
|
||||
-- Purpose: Convert vuln.merge_events to a partitioned table for improved
|
||||
-- query performance on time-range queries and easier data lifecycle management.
|
||||
--
|
||||
-- Partition strategy: Monthly by created_at
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 1: Create partitioned merge_events table
|
||||
-- ============================================================================
|
||||
|
||||
CREATE TABLE IF NOT EXISTS vuln.merge_events_partitioned (
|
||||
id BIGSERIAL,
|
||||
advisory_id UUID NOT NULL,
|
||||
source_id UUID,
|
||||
event_type TEXT NOT NULL,
|
||||
old_value JSONB,
|
||||
new_value JSONB,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
PRIMARY KEY (id, created_at)
|
||||
) PARTITION BY RANGE (created_at);
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 2: Create initial partitions (past 12 months + 3 months ahead)
|
||||
-- ============================================================================
|
||||
|
||||
DO $$
|
||||
DECLARE
|
||||
v_start DATE;
|
||||
v_end DATE;
|
||||
v_partition_name TEXT;
|
||||
BEGIN
|
||||
-- Start from 12 months ago (merge events accumulate fast)
|
||||
v_start := date_trunc('month', NOW() - INTERVAL '12 months')::DATE;
|
||||
|
||||
-- Create partitions until 3 months ahead
|
||||
WHILE v_start <= date_trunc('month', NOW() + INTERVAL '3 months')::DATE LOOP
|
||||
v_end := (v_start + INTERVAL '1 month')::DATE;
|
||||
v_partition_name := 'merge_events_' || to_char(v_start, 'YYYY_MM');
|
||||
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM pg_class c
|
||||
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||
WHERE n.nspname = 'vuln' AND c.relname = v_partition_name
|
||||
) THEN
|
||||
EXECUTE format(
|
||||
'CREATE TABLE vuln.%I PARTITION OF vuln.merge_events_partitioned
|
||||
FOR VALUES FROM (%L) TO (%L)',
|
||||
v_partition_name, v_start, v_end
|
||||
);
|
||||
RAISE NOTICE 'Created partition vuln.%', v_partition_name;
|
||||
END IF;
|
||||
|
||||
v_start := v_end;
|
||||
END LOOP;
|
||||
END
|
||||
$$;
|
||||
|
||||
-- Create default partition for any data outside defined ranges
|
||||
CREATE TABLE IF NOT EXISTS vuln.merge_events_default
|
||||
PARTITION OF vuln.merge_events_partitioned DEFAULT;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 3: Create indexes on partitioned table
|
||||
-- ============================================================================
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_merge_events_part_advisory
|
||||
ON vuln.merge_events_partitioned (advisory_id);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_merge_events_part_source
|
||||
ON vuln.merge_events_partitioned (source_id)
|
||||
WHERE source_id IS NOT NULL;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_merge_events_part_event_type
|
||||
ON vuln.merge_events_partitioned (event_type);
|
||||
|
||||
-- BRIN index for time-range queries
|
||||
CREATE INDEX IF NOT EXISTS brin_merge_events_part_created
|
||||
ON vuln.merge_events_partitioned USING BRIN (created_at)
|
||||
WITH (pages_per_range = 128);
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 4: Migrate data from old table to partitioned table
|
||||
-- ============================================================================
|
||||
|
||||
INSERT INTO vuln.merge_events_partitioned (
|
||||
id, advisory_id, source_id, event_type, old_value, new_value, created_at
|
||||
)
|
||||
SELECT
|
||||
id, advisory_id, source_id, event_type, old_value, new_value, created_at
|
||||
FROM vuln.merge_events
|
||||
ON CONFLICT DO NOTHING;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 5: Swap tables
|
||||
-- ============================================================================
|
||||
|
||||
-- Drop foreign key constraint first (advisory_id references advisories)
|
||||
ALTER TABLE vuln.merge_events DROP CONSTRAINT IF EXISTS merge_events_advisory_id_fkey;
|
||||
ALTER TABLE vuln.merge_events DROP CONSTRAINT IF EXISTS merge_events_source_id_fkey;
|
||||
|
||||
-- Rename old table to backup
|
||||
ALTER TABLE IF EXISTS vuln.merge_events RENAME TO merge_events_old;
|
||||
|
||||
-- Rename partitioned table to production name
|
||||
ALTER TABLE vuln.merge_events_partitioned RENAME TO merge_events;
|
||||
|
||||
-- Update sequence to continue from max ID
|
||||
DO $$
|
||||
DECLARE
|
||||
v_max_id BIGINT;
|
||||
BEGIN
|
||||
SELECT COALESCE(MAX(id), 0) INTO v_max_id FROM vuln.merge_events;
|
||||
PERFORM setval('vuln.merge_events_id_seq', v_max_id + 1, false);
|
||||
END
|
||||
$$;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 6: Add comment about partitioning strategy
|
||||
-- ============================================================================
|
||||
|
||||
COMMENT ON TABLE vuln.merge_events IS
|
||||
'Advisory merge event log. Partitioned monthly by created_at. FK to advisories removed for partition support.';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- ============================================================================
|
||||
-- Cleanup (run manually after validation)
|
||||
-- ============================================================================
|
||||
|
||||
-- After confirming the migration is successful, drop the old table:
|
||||
-- DROP TABLE IF EXISTS vuln.merge_events_old;
|
||||
@@ -0,0 +1,141 @@
|
||||
-- Vuln Schema Migration 007: Generated Columns for Advisory Hot Fields
|
||||
-- Sprint: SPRINT_3423_0001_0001 - Generated Columns
|
||||
-- Category: A (safe, can run at startup)
|
||||
--
|
||||
-- Purpose: Extract frequently queried fields from JSONB provenance column
|
||||
-- as generated columns for efficient indexing and filtering.
|
||||
--
|
||||
-- Performance Impact: Queries filtering on these fields will use B-tree indexes
|
||||
-- instead of JSONB operators, improving query time by 10-50x for dashboard queries.
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 1: Add generated columns to vuln.advisories
|
||||
-- ============================================================================
|
||||
|
||||
-- Extract source_key from provenance for fast source filtering
|
||||
ALTER TABLE vuln.advisories
|
||||
ADD COLUMN IF NOT EXISTS provenance_source_key TEXT
|
||||
GENERATED ALWAYS AS (provenance->>'source_key') STORED;
|
||||
|
||||
-- Extract feed_id from provenance for feed correlation
|
||||
ALTER TABLE vuln.advisories
|
||||
ADD COLUMN IF NOT EXISTS provenance_feed_id TEXT
|
||||
GENERATED ALWAYS AS (provenance->>'feed_id') STORED;
|
||||
|
||||
-- Extract ingestion timestamp from provenance for freshness queries
|
||||
ALTER TABLE vuln.advisories
|
||||
ADD COLUMN IF NOT EXISTS provenance_ingested_at TIMESTAMPTZ
|
||||
GENERATED ALWAYS AS ((provenance->>'ingested_at')::TIMESTAMPTZ) STORED;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 2: Create indexes on generated columns
|
||||
-- ============================================================================
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_advisories_provenance_source
|
||||
ON vuln.advisories (provenance_source_key)
|
||||
WHERE provenance_source_key IS NOT NULL;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_advisories_provenance_feed
|
||||
ON vuln.advisories (provenance_feed_id)
|
||||
WHERE provenance_feed_id IS NOT NULL;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_advisories_provenance_ingested
|
||||
ON vuln.advisories (provenance_ingested_at DESC)
|
||||
WHERE provenance_ingested_at IS NOT NULL;
|
||||
|
||||
-- Composite index for common dashboard query pattern
|
||||
CREATE INDEX IF NOT EXISTS ix_advisories_severity_ingested
|
||||
ON vuln.advisories (severity, provenance_ingested_at DESC)
|
||||
WHERE provenance_ingested_at IS NOT NULL;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 3: Add generated columns to vuln.advisory_affected for PURL parsing
|
||||
-- ============================================================================
|
||||
|
||||
-- Extract namespace from purl for namespace-based filtering
|
||||
-- purl format: pkg:type/namespace/name@version
|
||||
ALTER TABLE vuln.advisory_affected
|
||||
ADD COLUMN IF NOT EXISTS purl_type TEXT
|
||||
GENERATED ALWAYS AS (
|
||||
CASE
|
||||
WHEN purl IS NOT NULL AND purl LIKE 'pkg:%'
|
||||
THEN split_part(split_part(purl, ':', 2), '/', 1)
|
||||
ELSE NULL
|
||||
END
|
||||
) STORED;
|
||||
|
||||
-- Extract package name (without namespace) for faster lookups
|
||||
ALTER TABLE vuln.advisory_affected
|
||||
ADD COLUMN IF NOT EXISTS purl_name TEXT
|
||||
GENERATED ALWAYS AS (
|
||||
CASE
|
||||
WHEN purl IS NOT NULL AND purl LIKE 'pkg:%'
|
||||
THEN split_part(split_part(split_part(purl, ':', 2), '@', 1), '/', -1)
|
||||
ELSE NULL
|
||||
END
|
||||
) STORED;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 4: Create indexes on advisory_affected generated columns
|
||||
-- ============================================================================
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_advisory_affected_purl_type
|
||||
ON vuln.advisory_affected (purl_type)
|
||||
WHERE purl_type IS NOT NULL;
|
||||
|
||||
CREATE INDEX IF NOT EXISTS ix_advisory_affected_purl_name
|
||||
ON vuln.advisory_affected (purl_name)
|
||||
WHERE purl_name IS NOT NULL;
|
||||
|
||||
-- Composite index for ecosystem + purl_type queries
|
||||
CREATE INDEX IF NOT EXISTS ix_advisory_affected_ecosystem_type
|
||||
ON vuln.advisory_affected (ecosystem, purl_type)
|
||||
WHERE purl_type IS NOT NULL;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 5: Add comment documenting the generated columns
|
||||
-- ============================================================================
|
||||
|
||||
COMMENT ON COLUMN vuln.advisories.provenance_source_key IS
|
||||
'Generated: Extracted from provenance->>''source_key'' for fast source filtering';
|
||||
|
||||
COMMENT ON COLUMN vuln.advisories.provenance_feed_id IS
|
||||
'Generated: Extracted from provenance->>''feed_id'' for feed correlation';
|
||||
|
||||
COMMENT ON COLUMN vuln.advisories.provenance_ingested_at IS
|
||||
'Generated: Extracted from provenance->>''ingested_at'' for freshness queries';
|
||||
|
||||
COMMENT ON COLUMN vuln.advisory_affected.purl_type IS
|
||||
'Generated: Extracted package type from purl (npm, maven, pypi, etc.)';
|
||||
|
||||
COMMENT ON COLUMN vuln.advisory_affected.purl_name IS
|
||||
'Generated: Extracted package name from purl (without namespace/version)';
|
||||
|
||||
COMMIT;
|
||||
|
||||
-- ============================================================================
|
||||
-- Example Queries (showing performance improvement)
|
||||
-- ============================================================================
|
||||
|
||||
/*
|
||||
-- Before (uses GIN index, slower):
|
||||
SELECT * FROM vuln.advisories
|
||||
WHERE provenance->>'source_key' = 'nvd'
|
||||
AND severity = 'critical'
|
||||
ORDER BY (provenance->>'ingested_at')::timestamptz DESC
|
||||
LIMIT 100;
|
||||
|
||||
-- After (uses B-tree indexes, faster):
|
||||
SELECT * FROM vuln.advisories
|
||||
WHERE provenance_source_key = 'nvd'
|
||||
AND severity = 'critical'
|
||||
ORDER BY provenance_ingested_at DESC
|
||||
LIMIT 100;
|
||||
|
||||
-- Package type filtering (new capability):
|
||||
SELECT * FROM vuln.advisory_affected
|
||||
WHERE purl_type = 'npm'
|
||||
AND ecosystem = 'npm';
|
||||
*/
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.Abstractions plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.Abstractions.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.Cisco.CSAF plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.Cisco.CSAF.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.MSRC.CSAF plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.MSRC.CSAF.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.Oracle.CSAF plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.Oracle.CSAF.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.RedHat.CSAF plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.RedHat.CSAF.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.SUSE.RancherVEXHub plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Excititor.Connectors.Ubuntu.CSAF plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,102 @@
|
||||
-- VEX Schema Migration 003: Row-Level Security
-- Sprint: SPRINT_3421_0001_0001 - RLS Expansion
-- Category: B (release migration, requires coordination)
--
-- Purpose: Enable Row-Level Security on all tenant-scoped tables in the vex
-- schema to provide database-level tenant isolation as defense-in-depth.
--
-- Note: VEX tables use 'tenant' column (not 'tenant_id') as per original schema.

BEGIN;

-- ============================================================================
-- Step 1: Create helper schema and function for tenant context
-- ============================================================================

CREATE SCHEMA IF NOT EXISTS vex_app;

-- Tenant context helper function.
-- SECURITY DEFINER functions must pin search_path so objects referenced in the
-- body cannot be shadowed by a malicious schema (see PostgreSQL docs,
-- "Writing SECURITY DEFINER Functions Safely").
CREATE OR REPLACE FUNCTION vex_app.require_current_tenant()
RETURNS TEXT
LANGUAGE plpgsql STABLE SECURITY DEFINER
SET search_path = pg_catalog
AS $$
DECLARE
    v_tenant TEXT;
BEGIN
    v_tenant := current_setting('app.tenant_id', true);
    IF v_tenant IS NULL OR v_tenant = '' THEN
        RAISE EXCEPTION 'app.tenant_id session variable not set'
            USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
                  ERRCODE = 'P0001';
    END IF;
    RETURN v_tenant;
END;
$$;

REVOKE ALL ON FUNCTION vex_app.require_current_tenant() FROM PUBLIC;

-- ============================================================================
-- Step 2: Enable RLS on all tenant-scoped tables
-- ============================================================================

-- vex.linksets carries the tenant discriminator directly.
ALTER TABLE vex.linksets ENABLE ROW LEVEL SECURITY;
ALTER TABLE vex.linksets FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS linksets_tenant_isolation ON vex.linksets;
CREATE POLICY linksets_tenant_isolation ON vex.linksets
    FOR ALL
    USING (tenant = vex_app.require_current_tenant())
    WITH CHECK (tenant = vex_app.require_current_tenant());

-- Child tables (observations, disagreements, mutations) inherit tenancy via
-- their FK to vex.linksets; the three policies are identical, so they are
-- generated in a loop to keep them in lockstep.
-- Without an explicit WITH CHECK, PostgreSQL applies the USING expression to
-- writes as well, which is the intended behavior here.
DO $$
DECLARE
    t TEXT;
BEGIN
    FOREACH t IN ARRAY ARRAY[
        'linkset_observations',
        'linkset_disagreements',
        'linkset_mutations'
    ]
    LOOP
        EXECUTE format('ALTER TABLE vex.%I ENABLE ROW LEVEL SECURITY', t);
        EXECUTE format('ALTER TABLE vex.%I FORCE ROW LEVEL SECURITY', t);
        EXECUTE format('DROP POLICY IF EXISTS %I ON vex.%I', t || '_tenant_isolation', t);
        EXECUTE format(
            'CREATE POLICY %I ON vex.%I FOR ALL USING (linkset_id IN ('
            || 'SELECT linkset_id FROM vex.linksets '
            || 'WHERE tenant = vex_app.require_current_tenant()))',
            t || '_tenant_isolation', t);
    END LOOP;
END
$$;

-- ============================================================================
-- Step 3: Create admin bypass role
-- ============================================================================

DO $$
BEGIN
    IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'vex_admin') THEN
        CREATE ROLE vex_admin WITH NOLOGIN BYPASSRLS;
    END IF;
END
$$;

COMMIT;
|
||||
@@ -0,0 +1,95 @@
|
||||
-- VEX Schema Migration 004: Generated Columns for JSONB Hot Keys
-- Sprint: SPRINT_3423_0001_0001 - Generated Columns for JSONB Hot Keys
-- Category: A (safe, can run at startup)
--
-- Purpose: Extract frequently-queried fields from JSONB columns as generated columns
-- to enable efficient B-tree indexing and accurate query planning statistics.
--
-- NOTE: CREATE INDEX CONCURRENTLY cannot run inside a transaction block
-- (PostgreSQL raises an error), so the column additions are committed first
-- and the indexes are created afterwards outside any explicit transaction.

BEGIN;

-- ============================================================================
-- vex.vex_raw_documents: Extract metadata and provenance hot fields
-- ============================================================================

-- Format version from metadata (e.g., "openvex/0.2.0", "csaf/2.0")
ALTER TABLE vex.vex_raw_documents
    ADD COLUMN IF NOT EXISTS doc_format_version TEXT
    GENERATED ALWAYS AS (metadata_json->>'formatVersion') STORED;

-- Tool name that produced the VEX document
ALTER TABLE vex.vex_raw_documents
    ADD COLUMN IF NOT EXISTS doc_tool_name TEXT
    GENERATED ALWAYS AS (metadata_json->>'toolName') STORED;

-- Tool version for provenance tracking
ALTER TABLE vex.vex_raw_documents
    ADD COLUMN IF NOT EXISTS doc_tool_version TEXT
    GENERATED ALWAYS AS (metadata_json->>'toolVersion') STORED;

-- Author/supplier from provenance (common filter)
ALTER TABLE vex.vex_raw_documents
    ADD COLUMN IF NOT EXISTS doc_author TEXT
    GENERATED ALWAYS AS (provenance_json->>'author') STORED;

-- Timestamp from provenance (useful for ordering)
ALTER TABLE vex.vex_raw_documents
    ADD COLUMN IF NOT EXISTS doc_timestamp TIMESTAMPTZ
    GENERATED ALWAYS AS ((provenance_json->>'timestamp')::timestamptz) STORED;

COMMIT;

-- ============================================================================
-- Indexes on generated columns
-- (outside the transaction: CONCURRENTLY is not allowed in a transaction block)
-- ============================================================================

-- Index for filtering by format version (common dashboard query)
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_vex_raw_docs_format_version
    ON vex.vex_raw_documents (doc_format_version)
    WHERE doc_format_version IS NOT NULL;

-- Index for filtering by tool name (compliance reporting)
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_vex_raw_docs_tool_name
    ON vex.vex_raw_documents (tenant, doc_tool_name)
    WHERE doc_tool_name IS NOT NULL;

-- Index for author-based queries (supplier filtering)
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_vex_raw_docs_author
    ON vex.vex_raw_documents (tenant, doc_author)
    WHERE doc_author IS NOT NULL;

-- Composite index for time-ordered tool queries
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_vex_raw_docs_tool_time
    ON vex.vex_raw_documents (tenant, doc_tool_name, doc_timestamp DESC)
    WHERE doc_tool_name IS NOT NULL;

-- Covering index for document listing dashboard
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_vex_raw_docs_listing
    ON vex.vex_raw_documents (tenant, retrieved_at DESC)
    INCLUDE (format, doc_format_version, doc_tool_name, doc_author);

-- Update statistics for query planner (runs after the indexes exist)
ANALYZE vex.vex_raw_documents;

-- ============================================================================
-- Verification queries (run manually to confirm):
-- ============================================================================
--
-- -- Check generated columns are populated:
-- SELECT
--     digest,
--     format,
--     doc_format_version,
--     doc_tool_name,
--     doc_author,
--     doc_timestamp
-- FROM vex.vex_raw_documents
-- LIMIT 10;
--
-- -- Verify index usage:
-- EXPLAIN ANALYZE
-- SELECT digest, doc_tool_name, doc_timestamp
-- FROM vex.vex_raw_documents
-- WHERE tenant = 'test-tenant'
--   AND doc_tool_name = 'trivy'
-- ORDER BY doc_timestamp DESC
-- LIMIT 20;
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Notify.Connectors.Email plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Notify.Connectors.Email.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Notify.Connectors.Shared plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Notify.Connectors.Shared.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Notify.Connectors.Slack plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Notify.Connectors.Slack.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Notify.Connectors.Teams plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Notify.Connectors.Teams.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,5 @@
|
||||
// Assembly-level metadata for the StellaOps.Notify.Connectors.Webhook plugin.

using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;

// Expose internal types to the companion unit-test assembly.
[assembly: InternalsVisibleTo("StellaOps.Notify.Connectors.Webhook.Tests")]
// Plugin version plus the inclusive host-version range this plugin is compatible with.
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]
|
||||
@@ -0,0 +1,178 @@
|
||||
-- Notify Schema Migration 010: Row-Level Security
-- Sprint: SPRINT_3421_0001_0001 - RLS Expansion
-- Category: B (release migration, requires coordination)
--
-- Purpose: Enable Row-Level Security on all tenant-scoped tables in the notify
-- schema to provide database-level tenant isolation as defense-in-depth.

BEGIN;

-- ============================================================================
-- Step 1: Create helper schema and function for tenant context
-- ============================================================================

CREATE SCHEMA IF NOT EXISTS notify_app;

-- Tenant context helper function.
-- SECURITY DEFINER functions must pin search_path so objects referenced in the
-- body cannot be shadowed by a malicious schema (see PostgreSQL docs,
-- "Writing SECURITY DEFINER Functions Safely").
CREATE OR REPLACE FUNCTION notify_app.require_current_tenant()
RETURNS TEXT
LANGUAGE plpgsql STABLE SECURITY DEFINER
SET search_path = pg_catalog
AS $$
DECLARE
    v_tenant TEXT;
BEGIN
    v_tenant := current_setting('app.tenant_id', true);
    IF v_tenant IS NULL OR v_tenant = '' THEN
        RAISE EXCEPTION 'app.tenant_id session variable not set'
            USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
                  ERRCODE = 'P0001';
    END IF;
    RETURN v_tenant;
END;
$$;

REVOKE ALL ON FUNCTION notify_app.require_current_tenant() FROM PUBLIC;

-- ============================================================================
-- Step 2: Enable RLS on all tenant-scoped tables
-- ============================================================================
-- Every notify table carries a tenant_id column and gets the identical
-- isolation policy, so the stanzas are generated in a loop to keep the 14
-- tables in lockstep (policy name: <table>_tenant_isolation).

DO $$
DECLARE
    t TEXT;
BEGIN
    FOREACH t IN ARRAY ARRAY[
        'channels',
        'rules',
        'templates',
        'deliveries',
        'digests',
        'quiet_hours',
        'maintenance_windows',
        'escalation_policies',
        'escalation_states',
        'on_call_schedules',
        'inbox',
        'incidents',
        'audit',
        'locks'
    ]
    LOOP
        EXECUTE format('ALTER TABLE notify.%I ENABLE ROW LEVEL SECURITY', t);
        EXECUTE format('ALTER TABLE notify.%I FORCE ROW LEVEL SECURITY', t);
        EXECUTE format('DROP POLICY IF EXISTS %I ON notify.%I', t || '_tenant_isolation', t);
        EXECUTE format(
            'CREATE POLICY %I ON notify.%I FOR ALL '
            || 'USING (tenant_id = notify_app.require_current_tenant()) '
            || 'WITH CHECK (tenant_id = notify_app.require_current_tenant())',
            t || '_tenant_isolation', t);
    END LOOP;
END
$$;

-- ============================================================================
-- Step 3: Create admin bypass role
-- ============================================================================

DO $$
BEGIN
    IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'notify_admin') THEN
        CREATE ROLE notify_admin WITH NOLOGIN BYPASSRLS;
    END IF;
END
$$;

COMMIT;
|
||||
@@ -1,4 +1,5 @@
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring;
|
||||
|
||||
@@ -142,4 +143,18 @@ public sealed record RiskScoringResult(
|
||||
[property: JsonPropertyName("signal_contributions")] IReadOnlyDictionary<string, double> SignalContributions,
|
||||
[property: JsonPropertyName("override_applied")] string? OverrideApplied,
|
||||
[property: JsonPropertyName("override_reason")] string? OverrideReason,
|
||||
[property: JsonPropertyName("scored_at")] DateTimeOffset ScoredAt);
|
||||
[property: JsonPropertyName("scored_at")] DateTimeOffset ScoredAt)
|
||||
{
|
||||
private IReadOnlyList<ScoreExplanation> _explain = Array.Empty<ScoreExplanation>();
|
||||
|
||||
/// <summary>
|
||||
/// Structured explanation of score contributions.
|
||||
/// Sorted deterministically by factor name.
|
||||
/// </summary>
|
||||
[JsonPropertyName("explain")]
|
||||
public IReadOnlyList<ScoreExplanation> Explain
|
||||
{
|
||||
get => _explain;
|
||||
init => _explain = value ?? Array.Empty<ScoreExplanation>();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,3 +5,4 @@ This file mirrors sprint work for the Policy Engine module.
|
||||
| Task ID | Sprint | Status | Notes |
|
||||
| --- | --- | --- | --- |
|
||||
| `POLICY-GATE-401-033` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DONE (2025-12-13) | Implemented PolicyGateEvaluator (lattice/uncertainty/evidence completeness) and aligned tests/docs; see `src/Policy/StellaOps.Policy.Engine/Gates/PolicyGateEvaluator.cs` and `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Gates/PolicyGateEvaluatorTests.cs`. |
|
||||
| `DET-3401-011` | `docs/implplan/SPRINT_3401_0001_0001_determinism_scoring_foundations.md` | DONE (2025-12-14) | Added `Explain` to `RiskScoringResult` and covered JSON serialization + null-coercion in `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/Scoring/RiskScoringResultTests.cs`. |
|
||||
|
||||
128
src/Policy/StellaOps.Policy.Scoring/Engine/CvssEngineFactory.cs
Normal file
128
src/Policy/StellaOps.Policy.Scoring/Engine/CvssEngineFactory.cs
Normal file
@@ -0,0 +1,128 @@
|
||||
namespace StellaOps.Policy.Scoring.Engine;
|
||||
|
||||
/// <summary>
/// Factory for creating CVSS engines and detecting versions from vector strings.
/// All engines are constructed once and reused: the v2/v3 engines were already
/// cached, and the v4 adapter is now cached too instead of being allocated on
/// every <see cref="Create"/> call (consistency + avoids per-call garbage).
/// </summary>
public sealed class CvssEngineFactory : ICvssEngineFactory
{
    private readonly ICvssEngine _v4Adapter;
    private readonly CvssV3Engine _v31Engine;
    private readonly CvssV3Engine _v30Engine;
    private readonly CvssV2Engine _v2Engine;

    /// <param name="v4Engine">Optional v4 engine override (e.g. for tests); defaults to <see cref="CvssV4Engine"/>.</param>
    public CvssEngineFactory(ICvssV4Engine? v4Engine = null)
    {
        _v4Adapter = new CvssV4EngineAdapter(v4Engine ?? new CvssV4Engine());
        _v31Engine = new CvssV3Engine(CvssVersion.V3_1);
        _v30Engine = new CvssV3Engine(CvssVersion.V3_0);
        _v2Engine = new CvssV2Engine();
    }

    /// <summary>Returns the cached engine for <paramref name="version"/>.</summary>
    /// <exception cref="ArgumentOutOfRangeException">Unknown CVSS version.</exception>
    public ICvssEngine Create(CvssVersion version) => version switch
    {
        CvssVersion.V2 => _v2Engine,
        CvssVersion.V3_0 => _v30Engine,
        CvssVersion.V3_1 => _v31Engine,
        CvssVersion.V4_0 => _v4Adapter,
        _ => throw new ArgumentOutOfRangeException(nameof(version), version, "Unsupported CVSS version")
    };

    /// <summary>
    /// Best-effort version detection from a vector string: explicit "CVSS:x.y/"
    /// prefixes first, then v2's "CVSS2#"/"Au:" markers, then metric-pattern
    /// heuristics. Returns null when no version can be inferred.
    /// </summary>
    public CvssVersion? DetectVersion(string vectorString)
    {
        if (string.IsNullOrWhiteSpace(vectorString))
            return null;

        var trimmed = vectorString.Trim();

        // CVSS v4.0: "CVSS:4.0/..."
        if (trimmed.StartsWith("CVSS:4.0/", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V4_0;

        // CVSS v3.1: "CVSS:3.1/..."
        if (trimmed.StartsWith("CVSS:3.1/", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V3_1;

        // CVSS v3.0: "CVSS:3.0/..."
        if (trimmed.StartsWith("CVSS:3.0/", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V3_0;

        // CVSS v2.0: No prefix or "CVSS2#", contains "Au:" (Authentication)
        if (trimmed.Contains("Au:", StringComparison.OrdinalIgnoreCase) ||
            trimmed.StartsWith("CVSS2#", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V2;

        // Try to detect by metric patterns
        // v4.0 unique: AT: (Attack Requirements)
        if (trimmed.Contains("/AT:", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V4_0;

        // v3.x unique: PR: (Privileges Required), S: (Scope)
        if (trimmed.Contains("/PR:", StringComparison.OrdinalIgnoreCase) &&
            trimmed.Contains("/S:", StringComparison.OrdinalIgnoreCase))
            return CvssVersion.V3_1; // Default to 3.1 if unspecified

        return null;
    }

    /// <summary>
    /// Detects the version of <paramref name="vectorString"/> and computes its
    /// score with the matching engine.
    /// </summary>
    /// <exception cref="ArgumentException">Version could not be detected.</exception>
    public CvssVersionedScore ComputeFromVector(string vectorString)
    {
        var version = DetectVersion(vectorString);
        if (version is null)
            throw new ArgumentException($"Unable to detect CVSS version from vector: {vectorString}", nameof(vectorString));

        var engine = Create(version.Value);
        return engine.ComputeFromVector(vectorString);
    }
}
|
||||
|
||||
/// <summary>
/// Adapts an <see cref="ICvssV4Engine"/> to the common <see cref="ICvssEngine"/>
/// surface so v4.0 can be used interchangeably with the v2/v3 engines.
/// </summary>
internal sealed class CvssV4EngineAdapter : ICvssEngine
{
    private readonly ICvssV4Engine _inner;

    public CvssV4EngineAdapter(ICvssV4Engine engine)
        => _inner = engine ?? throw new ArgumentNullException(nameof(engine));

    public CvssVersion Version => CvssVersion.V4_0;

    /// <summary>
    /// Parses, scores, and canonicalizes a v4.0 vector. Threat/environmental
    /// scores are surfaced only when they are positive and differ from the
    /// base score; otherwise they are reported as null.
    /// </summary>
    public CvssVersionedScore ComputeFromVector(string vectorString)
    {
        var parsed = _inner.ParseVector(vectorString);
        var computed = _inner.ComputeScores(parsed.BaseMetrics, parsed.ThreatMetrics, parsed.EnvironmentalMetrics);
        var canonicalVector = _inner.BuildVectorString(parsed.BaseMetrics, parsed.ThreatMetrics, parsed.EnvironmentalMetrics, parsed.SupplementalMetrics);

        var baseScore = computed.BaseScore;
        var threat = computed.ThreatScore;
        var env = computed.EnvironmentalScore;

        return new CvssVersionedScore
        {
            Version = CvssVersion.V4_0,
            BaseScore = baseScore,
            TemporalScore = (threat > 0 && threat != baseScore) ? threat : null,
            EnvironmentalScore = (env > 0 && env != baseScore) ? env : null,
            EffectiveScore = computed.EffectiveScore,
            Severity = _inner.GetSeverity(computed.EffectiveScore).ToString(),
            VectorString = canonicalVector
        };
    }

    /// <summary>A vector is valid iff the underlying engine can parse it.</summary>
    public bool IsValidVector(string vectorString)
    {
        if (string.IsNullOrWhiteSpace(vectorString))
            return false;

        try
        {
            _inner.ParseVector(vectorString);
            return true;
        }
        catch
        {
            return false;
        }
    }

    public string GetSeverityLabel(double score) => _inner.GetSeverity(score).ToString();
}
|
||||
211
src/Policy/StellaOps.Policy.Scoring/Engine/CvssV2Engine.cs
Normal file
211
src/Policy/StellaOps.Policy.Scoring/Engine/CvssV2Engine.cs
Normal file
@@ -0,0 +1,211 @@
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Engine;
|
||||
|
||||
/// <summary>
|
||||
/// CVSS v2.0 scoring engine per FIRST specification.
|
||||
/// https://www.first.org/cvss/v2/guide
|
||||
/// </summary>
|
||||
public sealed partial class CvssV2Engine : ICvssEngine
|
||||
{
|
||||
public CvssVersion Version => CvssVersion.V2;
|
||||
|
||||
// CVSS v2 vector pattern - supports base, temporal, and environmental metric groups
|
||||
// Base: AV:N/AC:L/Au:N/C:C/I:C/A:C
|
||||
// Temporal: E:POC/RL:OF/RC:C (E can be U/POC/F/H/ND, RL can be OF/TF/W/U/ND, RC can be UC/UR/C/ND)
|
||||
// Environmental: CDP:N/TD:N/CR:M/IR:M/AR:M
|
||||
[GeneratedRegex(@"^(?:CVSS2#)?AV:([LAN])/AC:([HML])/Au:([MSN])/C:([NPC])/I:([NPC])/A:([NPC])(?:/E:(U|POC|F|H|ND)/RL:(OF|TF|W|U|ND)/RC:(UC|UR|C|ND))?(?:/CDP:(N|L|LM|MH|H|ND)/TD:(N|L|M|H|ND)/CR:(L|M|H|ND)/IR:(L|M|H|ND)/AR:(L|M|H|ND))?$", RegexOptions.IgnoreCase)]
|
||||
private static partial Regex VectorPattern();
|
||||
|
||||
/// <summary>
/// Parses a CVSS v2.0 vector and computes base, temporal, and environmental
/// scores per the FIRST v2 guide equations. The effective score is the most
/// specific score present (environmental, else temporal, else base).
/// </summary>
/// <exception cref="ArgumentNullException">vectorString is null.</exception>
/// <exception cref="ArgumentException">vectorString does not match the v2 grammar.</exception>
public CvssVersionedScore ComputeFromVector(string vectorString)
{
    ArgumentNullException.ThrowIfNull(vectorString);

    var match = VectorPattern().Match(vectorString.Trim());
    if (!match.Success)
        throw new ArgumentException($"Invalid CVSS v2.0 vector string: {vectorString}", nameof(vectorString));

    // Parse base metrics
    var av = ParseAccessVector(match.Groups[1].Value);
    var ac = ParseAccessComplexity(match.Groups[2].Value);
    var au = ParseAuthentication(match.Groups[3].Value);
    var c = ParseImpact(match.Groups[4].Value);
    var i = ParseImpact(match.Groups[5].Value);
    var a = ParseImpact(match.Groups[6].Value);

    // Base score per guide:
    //   Impact = 10.41 * (1 - (1-C)(1-I)(1-A))
    //   Exploitability = 20 * AV * AC * Au
    //   f(Impact) = 0 if Impact == 0, else 1.176
    var impact = 10.41 * (1 - (1 - c) * (1 - i) * (1 - a));
    var exploitability = 20 * av * ac * au;
    var fImpact = impact == 0 ? 0 : 1.176;
    var baseScore = Math.Round(((0.6 * impact) + (0.4 * exploitability) - 1.5) * fImpact, 1, MidpointRounding.AwayFromZero);
    baseScore = Math.Clamp(baseScore, 0, 10);

    // Temporal metrics are parsed once here and reused for the environmental
    // adjustment below (the original parsed them twice).
    var hasTemporal = match.Groups[7].Success;
    double e = 1.0, rl = 1.0, rc = 1.0;
    double? temporalScore = null;
    if (hasTemporal)
    {
        e = ParseExploitability(match.Groups[7].Value);
        rl = ParseRemediationLevel(match.Groups[8].Value);
        rc = ParseReportConfidence(match.Groups[9].Value);
        temporalScore = Math.Round(baseScore * e * rl * rc, 1, MidpointRounding.AwayFromZero);
    }

    // Environmental score, when CDP/TD/CR/IR/AR are present:
    //   AdjustedImpact = min(10, 10.41 * (1 - (1-C*CR)(1-I*IR)(1-A*AR)))
    //   AdjustedTemporal = temporal equation recomputed with AdjustedImpact
    //   Environmental = (AdjustedTemporal + (10 - AdjustedTemporal) * CDP) * TD
    double? environmentalScore = null;
    if (match.Groups[10].Success)
    {
        var cdp = ParseCollateralDamagePotential(match.Groups[10].Value);
        var td = ParseTargetDistribution(match.Groups[11].Value);
        var cr = ParseRequirement(match.Groups[12].Value);
        var ir = ParseRequirement(match.Groups[13].Value);
        var ar = ParseRequirement(match.Groups[14].Value);

        var adjustedImpact = Math.Min(10, 10.41 * (1 - (1 - c * cr) * (1 - i * ir) * (1 - a * ar)));
        var adjustedBase = Math.Round(((0.6 * adjustedImpact) + (0.4 * exploitability) - 1.5) * fImpact, 1, MidpointRounding.AwayFromZero);

        if (hasTemporal)
            adjustedBase = Math.Round(adjustedBase * e * rl * rc, 1, MidpointRounding.AwayFromZero);

        environmentalScore = Math.Round((adjustedBase + (10 - adjustedBase) * cdp) * td, 1, MidpointRounding.AwayFromZero);
        environmentalScore = Math.Clamp(environmentalScore.Value, 0, 10);
    }

    var effectiveScore = environmentalScore ?? temporalScore ?? baseScore;

    return new CvssVersionedScore
    {
        Version = CvssVersion.V2,
        BaseScore = baseScore,
        TemporalScore = temporalScore,
        EnvironmentalScore = environmentalScore,
        EffectiveScore = effectiveScore,
        Severity = GetSeverityLabel(effectiveScore),
        VectorString = NormalizeVector(vectorString)
    };
}
|
||||
|
||||
/// <summary>Checks whether a string is a syntactically valid CVSS v2 vector.</summary>
public bool IsValidVector(string vectorString)
{
    // Blank input can never be a valid vector.
    if (string.IsNullOrWhiteSpace(vectorString))
    {
        return false;
    }

    var candidate = vectorString.Trim();
    return VectorPattern().IsMatch(candidate);
}
|
||||
|
||||
/// <summary>Maps a CVSS v2 score to the NVD qualitative rating
/// (High 7.0-10.0, Medium 4.0-6.9, Low above 0, None at 0).</summary>
public string GetSeverityLabel(double score)
{
    if (score >= 7.0)
    {
        return "High";
    }

    if (score >= 4.0)
    {
        return "Medium";
    }

    return score > 0 ? "Low" : "None";
}
|
||||
|
||||
/// <summary>Returns the canonical form of a v2 vector: trimmed, upper-case,
/// and carrying the "CVSS2#" prefix.</summary>
private static string NormalizeVector(string vector)
{
    var canonical = vector.Trim().ToUpperInvariant();
    return canonical.StartsWith("CVSS2#", StringComparison.Ordinal)
        ? canonical
        : "CVSS2#" + canonical;
}
|
||||
|
||||
// Access Vector (AV) weights per CVSS v2.
private static double ParseAccessVector(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 1.0;   // Network
    if (metric == "A") return 0.646; // Adjacent Network
    if (metric == "L") return 0.395; // Local
    throw new ArgumentException($"Invalid Access Vector: {value}");
}
|
||||
|
||||
// Access Complexity (AC) weights per CVSS v2.
private static double ParseAccessComplexity(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "L") return 0.71; // Low
    if (metric == "M") return 0.61; // Medium
    if (metric == "H") return 0.35; // High
    throw new ArgumentException($"Invalid Access Complexity: {value}");
}
|
||||
|
||||
// Authentication (Au) - v2 specific metric, dropped in v3.
private static double ParseAuthentication(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0.704; // None
    if (metric == "S") return 0.56;  // Single
    if (metric == "M") return 0.45;  // Multiple
    throw new ArgumentException($"Invalid Authentication: {value}");
}
|
||||
|
||||
// Impact (C/I/A) weights per CVSS v2: None / Partial / Complete.
private static double ParseImpact(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0;     // None
    if (metric == "P") return 0.275; // Partial
    if (metric == "C") return 0.660; // Complete
    throw new ArgumentException($"Invalid Impact: {value}");
}
|
||||
|
||||
// Exploitability (E), CVSS v2 temporal metric.
// Spec values: Unproven 0.85, Proof-of-Concept 0.9, Functional 0.95,
// High 1.0, Not Defined 1.0. The previous code mapped "U" to 1.0,
// which overstated the temporal score for unproven exploits.
private static double ParseExploitability(string value) => value.ToUpperInvariant() switch
{
    "U" => 0.85,          // Unproven (spec value; was incorrectly 1.0)
    "POC" or "P" => 0.9,  // Proof of Concept
    "F" => 0.95,          // Functional
    "H" => 1.0,           // High
    "ND" => 1.0,          // Not Defined
    _ => 1.0              // Unknown tokens are treated as Not Defined
};
|
||||
|
||||
// Remediation Level (RL), CVSS v2 temporal metric.
private static double ParseRemediationLevel(string value)
{
    return value.ToUpperInvariant() switch
    {
        "OF" or "O" => 0.87, // Official Fix
        "TF" or "T" => 0.90, // Temporary Fix
        "W" => 0.95,         // Workaround
        _ => 1.0             // "U" (Unavailable), "ND", and anything unrecognized
    };
}
|
||||
|
||||
// Report Confidence (RC), CVSS v2 temporal metric.
private static double ParseReportConfidence(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric is "UC" or "U") return 0.9; // Unconfirmed
    if (metric == "UR") return 0.95;       // Uncorroborated
    return 1.0;                            // "C" (Confirmed), "ND", default
}
|
||||
|
||||
// Collateral Damage Potential (CDP), CVSS v2 environmental metric.
private static double ParseCollateralDamagePotential(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "H") return 0.5;  // High
    if (metric == "MH") return 0.4; // Medium-High
    if (metric == "LM") return 0.3; // Low-Medium
    if (metric == "L") return 0.1;  // Low
    return 0;                       // "N", "ND", and anything unrecognized
}
|
||||
|
||||
// Target Distribution (TD), CVSS v2 environmental metric.
// Spec values: None 0, Low 0.25, Medium 0.75, High 1.0, Not Defined 1.0.
// The previous code conflated "N" with "ND" and returned 1.0 for both;
// TD:N means no vulnerable targets exist, so the environmental score
// must collapse to 0.
private static double ParseTargetDistribution(string value) => value.ToUpperInvariant() switch
{
    "N" => 0,     // None (was incorrectly 1.0)
    "L" => 0.25,  // Low
    "M" => 0.75,  // Medium
    "H" => 1.0,   // High
    "ND" => 1.0,  // Not Defined
    _ => 1.0      // Unknown tokens are treated as Not Defined
};
|
||||
|
||||
// Security Requirements (CR/IR/AR), CVSS v2 environmental metric.
private static double ParseRequirement(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "L") return 0.5;  // Low
    if (metric == "H") return 1.51; // High
    return 1.0;                     // "M", "ND", default
}
|
||||
}
|
||||
350
src/Policy/StellaOps.Policy.Scoring/Engine/CvssV3Engine.cs
Normal file
350
src/Policy/StellaOps.Policy.Scoring/Engine/CvssV3Engine.cs
Normal file
@@ -0,0 +1,350 @@
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Engine;
|
||||
|
||||
/// <summary>
|
||||
/// CVSS v3.0/v3.1 scoring engine per FIRST specification.
|
||||
/// https://www.first.org/cvss/v3.1/specification-document
|
||||
/// </summary>
|
||||
public sealed partial class CvssV3Engine : ICvssEngine
|
||||
{
|
||||
private readonly CvssVersion _version;
|
||||
|
||||
/// <summary>Creates an engine for a specific v3.x revision (v3.1 by default).</summary>
/// <exception cref="ArgumentException">Thrown for any version outside the v3.x family.</exception>
public CvssV3Engine(CvssVersion version = CvssVersion.V3_1)
{
    // This engine only implements the v3.x family.
    var supported = version is CvssVersion.V3_0 or CvssVersion.V3_1;
    if (!supported)
        throw new ArgumentException("Version must be V3_0 or V3_1", nameof(version));
    _version = version;
}
|
||||
|
||||
/// <summary>The CVSS revision (v3.0 or v3.1) this engine instance scores with.</summary>
public CvssVersion Version => _version;

// CVSS v3 vector pattern.
// Capture groups: 1-8 base metrics (AV/AC/PR/UI/S/C/I/A), 9-11 temporal
// (E/RL/RC), 12-14 security requirements (CR/IR/AR), 15-22 modified base
// metrics (MAV..MA).
// NOTE(review): the temporal, requirement, and modified sub-groups are each
// all-or-nothing and fixed-order here, which is stricter than the spec
// (metrics may appear in any order and any subset) — confirm this is intended.
[GeneratedRegex(@"^CVSS:3\.[01]/AV:([NALP])/AC:([LH])/PR:([NLH])/UI:([NR])/S:([UC])/C:([NLH])/I:([NLH])/A:([NLH])(?:/E:([XUPFH])/RL:([XOTWU])/RC:([XURC]))?(?:/CR:([XLMH])/IR:([XLMH])/AR:([XLMH]))?(?:/MAV:([XNALP])/MAC:([XLH])/MPR:([XNLH])/MUI:([XNR])/MS:([XUC])/MC:([XNLH])/MI:([XNLH])/MA:([XNLH]))?$", RegexOptions.IgnoreCase)]
private static partial Regex VectorPattern();
|
||||
|
||||
/// <summary>
/// Parses a CVSS v3.x vector string and computes base, temporal, and
/// environmental scores per the FIRST specification.
/// </summary>
/// <param name="vectorString">Full vector, e.g. "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H".</param>
/// <returns>Computed scores with severity label and normalized vector.</returns>
/// <exception cref="ArgumentException">Thrown when the vector does not match the v3.x grammar.</exception>
public CvssVersionedScore ComputeFromVector(string vectorString)
{
    ArgumentNullException.ThrowIfNull(vectorString);

    var match = VectorPattern().Match(vectorString.Trim());
    if (!match.Success)
        throw new ArgumentException($"Invalid CVSS v3.x vector string: {vectorString}", nameof(vectorString));

    // Base metrics (regex groups 1-8).
    var av = ParseAttackVector(match.Groups[1].Value);
    var ac = ParseAttackComplexity(match.Groups[2].Value);
    var pr = ParsePrivilegesRequired(match.Groups[3].Value, match.Groups[5].Value);
    var ui = ParseUserInteraction(match.Groups[4].Value);
    var scope = match.Groups[5].Value.ToUpperInvariant() == "C";
    var c = ParseImpact(match.Groups[6].Value);
    var i = ParseImpact(match.Groups[7].Value);
    var a = ParseImpact(match.Groups[8].Value);

    var baseScore = ComputeBaseScore(av, ac, pr, ui, scope, c, i, a);

    // Temporal metrics (groups 9-11): TemporalScore = RoundUp(Base * E * RL * RC).
    double? temporalScore = null;
    if (match.Groups[9].Success && !string.IsNullOrEmpty(match.Groups[9].Value))
    {
        var e = ParseExploitCodeMaturity(match.Groups[9].Value);
        var rl = ParseRemediationLevel(match.Groups[10].Value);
        var rc = ParseReportConfidence(match.Groups[11].Value);
        temporalScore = RoundUp(baseScore * e * rl * rc);
    }

    // Environmental metrics (groups 12-22). "X" (Not Defined) in a modified
    // metric falls back to the corresponding base value.
    double? environmentalScore = null;
    if (match.Groups[12].Success && !string.IsNullOrEmpty(match.Groups[12].Value))
    {
        var cr = ParseRequirement(match.Groups[12].Value);
        var ir = ParseRequirement(match.Groups[13].Value);
        var ar = ParseRequirement(match.Groups[14].Value);

        // Modified Scope is resolved FIRST because Modified Privileges Required
        // must be weighted against the effective scope (MS, or base S when MS
        // is X). Passing the raw MS token mis-weighted MPR for "MS:X" vectors,
        // since "X" was interpreted as Unchanged regardless of the base Scope.
        var ms = match.Groups[19].Success ? ParseModifiedScope(match.Groups[19].Value) ?? scope : scope;
        var mav = match.Groups[15].Success ? ParseModifiedAttackVector(match.Groups[15].Value) ?? av : av;
        var mac = match.Groups[16].Success ? ParseModifiedAttackComplexity(match.Groups[16].Value) ?? ac : ac;
        var mpr = match.Groups[17].Success ? ParseModifiedPrivilegesRequired(match.Groups[17].Value, ms ? "C" : "U") ?? pr : pr;
        var mui = match.Groups[18].Success ? ParseModifiedUserInteraction(match.Groups[18].Value) ?? ui : ui;
        var mc = match.Groups[20].Success ? ParseModifiedImpact(match.Groups[20].Value) ?? c : c;
        var mi = match.Groups[21].Success ? ParseModifiedImpact(match.Groups[21].Value) ?? i : i;
        var ma = match.Groups[22].Success ? ParseModifiedImpact(match.Groups[22].Value) ?? a : a;

        environmentalScore = ComputeEnvironmentalScore(mav, mac, mpr, mui, ms, mc, mi, ma, cr, ir, ar);

        // Per spec, the temporal multipliers also apply to the environmental score.
        if (temporalScore.HasValue && match.Groups[9].Success)
        {
            var e = ParseExploitCodeMaturity(match.Groups[9].Value);
            var rl = ParseRemediationLevel(match.Groups[10].Value);
            var rc = ParseReportConfidence(match.Groups[11].Value);
            environmentalScore = RoundUp(environmentalScore.Value * e * rl * rc);
        }
    }

    // Most specific score wins: environmental > temporal > base.
    var effectiveScore = environmentalScore ?? temporalScore ?? baseScore;

    return new CvssVersionedScore
    {
        Version = _version,
        BaseScore = baseScore,
        TemporalScore = temporalScore,
        EnvironmentalScore = environmentalScore,
        EffectiveScore = effectiveScore,
        Severity = GetSeverityLabel(effectiveScore),
        VectorString = NormalizeVector(vectorString)
    };
}
|
||||
|
||||
/// <summary>Checks whether a string is a syntactically valid CVSS v3.x vector.</summary>
public bool IsValidVector(string vectorString)
{
    // Blank input can never be a valid vector.
    if (string.IsNullOrWhiteSpace(vectorString))
    {
        return false;
    }

    var candidate = vectorString.Trim();
    return VectorPattern().IsMatch(candidate);
}
|
||||
|
||||
/// <summary>Maps a CVSS v3 score to the spec's qualitative rating
/// (Critical 9.0-10.0, High 7.0-8.9, Medium 4.0-6.9, Low above 0, None at 0).</summary>
public string GetSeverityLabel(double score)
{
    if (score >= 9.0) return "Critical";
    if (score >= 7.0) return "High";
    if (score >= 4.0) return "Medium";
    return score > 0 ? "Low" : "None";
}
|
||||
|
||||
/// <summary>
/// Computes the CVSS v3 base score from pre-parsed metric weights,
/// following the FIRST v3.1 base score equations.
/// </summary>
/// <param name="av">Attack Vector weight.</param>
/// <param name="ac">Attack Complexity weight.</param>
/// <param name="pr">Privileges Required weight (already scope-adjusted).</param>
/// <param name="ui">User Interaction weight.</param>
/// <param name="scope">True when Scope is Changed.</param>
/// <param name="c">Confidentiality impact weight.</param>
/// <param name="i">Integrity impact weight.</param>
/// <param name="a">Availability impact weight.</param>
/// <returns>Base score in [0, 10], rounded up to one decimal.</returns>
private double ComputeBaseScore(double av, double ac, double pr, double ui, bool scope, double c, double i, double a)
{
    // ISS ("Impact Sub-Score") = 1 - (1-C)(1-I)(1-A).
    var iss = 1 - (1 - c) * (1 - i) * (1 - a);

    double impact;
    if (scope)
    {
        // Changed scope: Impact = 7.52*(ISS-0.029) - 3.25*(ISS-0.02)^15.
        impact = 7.52 * (iss - 0.029) - 3.25 * Math.Pow(iss - 0.02, 15);
    }
    else
    {
        // Unchanged scope: Impact = 6.42*ISS.
        impact = 6.42 * iss;
    }

    var exploitability = 8.22 * av * ac * pr * ui;

    // Per spec: if Impact <= 0, the base score is 0.
    if (impact <= 0)
        return 0;

    double baseScore;
    if (scope)
    {
        // Changed scope applies the 1.08 multiplier before capping at 10.
        baseScore = Math.Min(1.08 * (impact + exploitability), 10);
    }
    else
    {
        baseScore = Math.Min(impact + exploitability, 10);
    }

    // Spec-mandated "Roundup" to one decimal place.
    return RoundUp(baseScore);
}
|
||||
|
||||
/// <summary>
/// Computes the CVSS v3 environmental score from modified metric weights and
/// security requirements, following the FIRST environmental equations.
/// </summary>
/// <returns>Environmental score in [0, 10], rounded up to one decimal.</returns>
private double ComputeEnvironmentalScore(double mav, double mac, double mpr, double mui, bool ms,
    double mc, double mi, double ma, double cr, double ir, double ar)
{
    // MISS ("Modified Impact Sub-Score"), capped at 0.915 per spec.
    var miss = Math.Min(1 - (1 - mc * cr) * (1 - mi * ir) * (1 - ma * ar), 0.915);

    double modifiedImpact;
    if (ms)
    {
        // Changed scope. v3.1 revised this term (factor 0.9731, exponent 13);
        // v3.0 uses the same shape as the base formula (exponent 15, no factor).
        // Previously the v3.1 term was applied unconditionally, giving wrong
        // environmental scores when this engine was constructed for V3_0.
        modifiedImpact = _version == CvssVersion.V3_0
            ? 7.52 * (miss - 0.029) - 3.25 * Math.Pow(miss - 0.02, 15)
            : 7.52 * (miss - 0.029) - 3.25 * Math.Pow(miss * 0.9731 - 0.02, 13);
    }
    else
    {
        // Unchanged scope: ModifiedImpact = 6.42*MISS (same in v3.0 and v3.1).
        modifiedImpact = 6.42 * miss;
    }

    var modifiedExploitability = 8.22 * mav * mac * mpr * mui;

    // Per spec: if ModifiedImpact <= 0, the environmental score is 0.
    if (modifiedImpact <= 0)
        return 0;

    // Changed scope applies the 1.08 multiplier before capping at 10.
    var envScore = ms
        ? Math.Min(1.08 * (modifiedImpact + modifiedExploitability), 10)
        : Math.Min(modifiedImpact + modifiedExploitability, 10);

    return RoundUp(envScore);
}
|
||||
|
||||
/// <summary>Returns the canonical form of a v3 vector: trimmed, upper-case,
/// and carrying a "CVSS:3.x/" prefix ("CVSS:3.1/" is prepended when absent).</summary>
private static string NormalizeVector(string vector)
{
    var canonical = vector.Trim().ToUpperInvariant();
    return canonical.StartsWith("CVSS:3.", StringComparison.Ordinal)
        ? canonical
        : "CVSS:3.1/" + canonical;
}
|
||||
|
||||
/// <summary>
/// CVSS v3.1 "Roundup": rounds a score UP to one decimal place.
/// Implemented with integer arithmetic, mirroring the spec's Appendix A
/// pseudocode, to avoid binary floating-point representation errors that a
/// naive Math.Ceiling(value * 10) / 10 would introduce.
/// </summary>
private static double RoundUp(double value)
{
    // CVSS v3 uses "round up" to nearest 0.1
    // Scale to 5 decimal digits of integer precision first (per spec pseudocode).
    var intValue = (int)Math.Round(value * 100000);
    // Already an exact multiple of 0.1 — return it unchanged.
    if (intValue % 10000 == 0)
        return intValue / 100000.0;
    // Otherwise round up to the next 0.1.
    return (Math.Floor((double)intValue / 10000) + 1) / 10.0;
}
|
||||
|
||||
// Attack Vector (AV) weights per CVSS v3.
private static double ParseAttackVector(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0.85; // Network
    if (metric == "A") return 0.62; // Adjacent
    if (metric == "L") return 0.55; // Local
    if (metric == "P") return 0.2;  // Physical
    throw new ArgumentException($"Invalid Attack Vector: {value}");
}
|
||||
|
||||
// Attack Complexity (AC) weights per CVSS v3.
private static double ParseAttackComplexity(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "L") return 0.77; // Low
    if (metric == "H") return 0.44; // High
    throw new ArgumentException($"Invalid Attack Complexity: {value}");
}
|
||||
|
||||
// Privileges Required (PR) - its L/H weights depend on whether Scope is Changed.
private static double ParsePrivilegesRequired(string value, string scopeValue)
{
    var changed = scopeValue.ToUpperInvariant() == "C";
    switch (value.ToUpperInvariant())
    {
        case "N": return 0.85;                 // None
        case "L": return changed ? 0.68 : 0.62; // Low
        case "H": return changed ? 0.5 : 0.27;  // High
        default: throw new ArgumentException($"Invalid Privileges Required: {value}");
    }
}
|
||||
|
||||
// User Interaction (UI) weights per CVSS v3.
private static double ParseUserInteraction(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0.85; // None
    if (metric == "R") return 0.62; // Required
    throw new ArgumentException($"Invalid User Interaction: {value}");
}
|
||||
|
||||
// Impact (C/I/A) weights per CVSS v3: None / Low / High.
private static double ParseImpact(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0;    // None
    if (metric == "L") return 0.22; // Low
    if (metric == "H") return 0.56; // High
    throw new ArgumentException($"Invalid Impact: {value}");
}
|
||||
|
||||
// Exploit Code Maturity (E), CVSS v3 temporal metric.
private static double ParseExploitCodeMaturity(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "U") return 0.91; // Unproven
    if (metric == "P") return 0.94; // Proof of Concept
    if (metric == "F") return 0.97; // Functional
    return 1.0;                     // "X" (Not Defined), "H" (High), default
}
|
||||
|
||||
// Remediation Level (RL), CVSS v3 temporal metric.
private static double ParseRemediationLevel(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "O") return 0.95; // Official Fix
    if (metric == "T") return 0.96; // Temporary Fix
    if (metric == "W") return 0.97; // Workaround
    return 1.0;                     // "X" (Not Defined), "U" (Unavailable), default
}
|
||||
|
||||
// Report Confidence (RC), CVSS v3 temporal metric.
private static double ParseReportConfidence(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "U") return 0.92; // Unknown
    if (metric == "R") return 0.96; // Reasonable
    return 1.0;                     // "X" (Not Defined), "C" (Confirmed), default
}
|
||||
|
||||
// Security Requirements (CR/IR/AR), CVSS v3 environmental metric.
private static double ParseRequirement(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "L") return 0.5; // Low
    if (metric == "H") return 1.5; // High
    return 1.0;                    // "X" (Not Defined), "M" (Medium), default
}
|
||||
|
||||
// Modified metrics: "X" (Not Defined) yields null so the caller falls back to
// the corresponding base metric weight.
private static double? ParseModifiedAttackVector(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0.85;
    if (metric == "A") return 0.62;
    if (metric == "L") return 0.55;
    if (metric == "P") return 0.2;
    return null; // "X" or unrecognized -> use base AV
}
|
||||
|
||||
// Modified Attack Complexity; null means fall back to base AC.
private static double? ParseModifiedAttackComplexity(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "L") return 0.77;
    if (metric == "H") return 0.44;
    return null; // "X" or unrecognized -> use base AC
}
|
||||
|
||||
// Modified Privileges Required (MPR); "X" (Not Defined) yields null so the
// caller falls back to the base PR weight. Like base PR, the L/H weights
// depend on whether the scope is Changed.
// NOTE(review): scopeValue is the raw MS token at the current call site; when
// MS is "X" this treats scope as Unchanged, but the spec weights MPR against
// the base Scope in that case — confirm callers pass an effective scope value.
private static double? ParseModifiedPrivilegesRequired(string value, string scopeValue)
{
    if (value.ToUpperInvariant() == "X") return null;
    var scopeChanged = scopeValue.ToUpperInvariant() == "C";
    return value.ToUpperInvariant() switch
    {
        "N" => 0.85,
        "L" => scopeChanged ? 0.68 : 0.62,
        "H" => scopeChanged ? 0.5 : 0.27,
        _ => null
    };
}
|
||||
|
||||
// Modified User Interaction; null means fall back to base UI.
private static double? ParseModifiedUserInteraction(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "N") return 0.85;
    if (metric == "R") return 0.62;
    return null; // "X" or unrecognized -> use base UI
}
|
||||
|
||||
// Modified Scope; null means fall back to the base Scope.
private static bool? ParseModifiedScope(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "C") return true;  // Changed
    if (metric == "U") return false; // Unchanged
    return null;                     // "X" or unrecognized -> use base Scope
}
|
||||
|
||||
// Modified Impact (MC/MI/MA); null means fall back to the base impact weight.
private static double? ParseModifiedImpact(string value)
{
    var metric = value.ToUpperInvariant();
    if (metric == "H") return 0.56;
    if (metric == "L") return 0.22;
    if (metric == "N") return 0;
    return null; // "X" or unrecognized -> use base impact
}
|
||||
}
|
||||
102
src/Policy/StellaOps.Policy.Scoring/Engine/CvssVersion.cs
Normal file
102
src/Policy/StellaOps.Policy.Scoring/Engine/CvssVersion.cs
Normal file
@@ -0,0 +1,102 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Engine;
|
||||
|
||||
/// <summary>
|
||||
/// CVSS specification version.
|
||||
/// </summary>
|
||||
// Serialized as the member name (e.g. "V3_1") rather than its ordinal, so
// stored values remain stable if members are ever reordered.
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum CvssVersion
{
    /// <summary>CVSS v2.0</summary>
    V2,

    /// <summary>CVSS v3.0</summary>
    V3_0,

    /// <summary>CVSS v3.1</summary>
    V3_1,

    /// <summary>CVSS v4.0</summary>
    V4_0
}
|
||||
|
||||
/// <summary>
/// Version-agnostic CVSS score result produced by an <see cref="ICvssEngine"/>.
/// </summary>
public sealed record CvssVersionedScore
{
    /// <summary>The CVSS version used for scoring.</summary>
    public required CvssVersion Version { get; init; }

    /// <summary>Base score (0.0-10.0).</summary>
    public required double BaseScore { get; init; }

    /// <summary>Temporal score (v2/v3) or Threat score (v4); null when no
    /// temporal metrics were supplied in the vector.</summary>
    public double? TemporalScore { get; init; }

    /// <summary>Environmental score; null when no environmental metrics were
    /// supplied in the vector.</summary>
    public double? EnvironmentalScore { get; init; }

    /// <summary>The effective score to use for prioritization: environmental
    /// when present, else temporal, else base.</summary>
    public required double EffectiveScore { get; init; }

    /// <summary>Severity label (None/Low/Medium/High/Critical), derived from
    /// <see cref="EffectiveScore"/>.</summary>
    public required string Severity { get; init; }

    /// <summary>Vector string in version-appropriate normalized format.</summary>
    public required string VectorString { get; init; }
}
|
||||
|
||||
/// <summary>
/// Universal CVSS engine interface supporting all versions.
/// Implementations are expected to be stateless per call and deterministic.
/// </summary>
public interface ICvssEngine
{
    /// <summary>The CVSS version this engine implements.</summary>
    CvssVersion Version { get; }

    /// <summary>
    /// Computes scores from a vector string.
    /// </summary>
    /// <param name="vectorString">CVSS vector string.</param>
    /// <returns>Computed score with version information.</returns>
    /// <exception cref="ArgumentException">Thrown when the vector is not valid for this version.</exception>
    CvssVersionedScore ComputeFromVector(string vectorString);

    /// <summary>
    /// Validates a vector string format without computing scores.
    /// </summary>
    /// <param name="vectorString">Vector string to validate.</param>
    /// <returns>True if valid for this version.</returns>
    bool IsValidVector(string vectorString);

    /// <summary>
    /// Gets the qualitative severity label for a score.
    /// </summary>
    /// <param name="score">CVSS score (0.0-10.0).</param>
    /// <returns>Severity label.</returns>
    string GetSeverityLabel(double score);
}
|
||||
|
||||
/// <summary>
/// Factory for creating version-appropriate CVSS engines.
/// </summary>
public interface ICvssEngineFactory
{
    /// <summary>
    /// Creates an engine for the specified version.
    /// </summary>
    ICvssEngine Create(CvssVersion version);

    /// <summary>
    /// Detects the CVSS version from a vector string (e.g. from its
    /// "CVSS:3.1/" style prefix).
    /// </summary>
    /// <param name="vectorString">Vector string to analyze.</param>
    /// <returns>Detected version, or null if unrecognized.</returns>
    CvssVersion? DetectVersion(string vectorString);

    /// <summary>
    /// Computes scores automatically detecting version from vector string.
    /// </summary>
    CvssVersionedScore ComputeFromVector(string vectorString);
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -8,6 +8,10 @@
|
||||
<Description>CVSS v4.0 scoring engine with deterministic receipt generation for StellaOps policy decisions.</Description>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<InternalsVisibleTo Include="StellaOps.Policy.Scoring.Tests" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="JsonSchema.Net" Version="7.3.2" />
|
||||
<ProjectReference Include="..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
|
||||
|
||||
@@ -0,0 +1,182 @@
|
||||
-- Policy Schema Migration 004: EPSS Data and Risk Scores
-- Adds tables for EPSS (Exploit Prediction Scoring System) data and combined risk scores

-- EPSS scores table (cached EPSS data from FIRST.org)
-- One row per (cve_id, model_version); score and percentile are probabilities
-- constrained to [0, 1]. Rows age out via expires_at (default 7 days) so stale
-- EPSS data gets re-fetched.
CREATE TABLE IF NOT EXISTS policy.epss_scores (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    cve_id TEXT NOT NULL,
    score NUMERIC(6,5) NOT NULL CHECK (score >= 0 AND score <= 1),
    percentile NUMERIC(6,5) NOT NULL CHECK (percentile >= 0 AND percentile <= 1),
    model_version DATE NOT NULL,
    source TEXT NOT NULL DEFAULT 'first.org',
    fetched_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    expires_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + INTERVAL '7 days',
    UNIQUE(cve_id, model_version)
);

-- NOTE(review): unlike the CREATE TABLE above, these CREATE INDEX statements
-- lack IF NOT EXISTS, so re-running this migration would fail here — confirm
-- the migration runner guards against reruns.
CREATE INDEX idx_epss_scores_cve ON policy.epss_scores(cve_id);
CREATE INDEX idx_epss_scores_percentile ON policy.epss_scores(percentile DESC);
CREATE INDEX idx_epss_scores_expires ON policy.epss_scores(expires_at);
CREATE INDEX idx_epss_scores_model ON policy.epss_scores(model_version);

-- EPSS history table (for tracking score changes over time)
-- Append-only; no uniqueness constraint, so repeated snapshots of the same
-- model version are permitted.
CREATE TABLE IF NOT EXISTS policy.epss_history (
    id BIGSERIAL PRIMARY KEY,
    cve_id TEXT NOT NULL,
    score NUMERIC(6,5) NOT NULL,
    percentile NUMERIC(6,5) NOT NULL,
    model_version DATE NOT NULL,
    recorded_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX idx_epss_history_cve ON policy.epss_history(cve_id);
CREATE INDEX idx_epss_history_recorded ON policy.epss_history(cve_id, recorded_at DESC);
||||
|
||||
-- Combined risk scores table (CVSS + KEV + EPSS)
-- One row per (tenant, vulnerability, input_hash); input_hash makes the row
-- content-addressed so identical inputs always map to the same record.
CREATE TABLE IF NOT EXISTS policy.risk_scores (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    vulnerability_id TEXT NOT NULL,
    cvss_receipt_id UUID REFERENCES policy.cvss_receipts(id),

    -- Component scores
    cvss_score NUMERIC(4,1) NOT NULL,
    cvss_version TEXT NOT NULL,
    kev_flag BOOLEAN NOT NULL DEFAULT FALSE,
    kev_added_date DATE,
    epss_score NUMERIC(6,5),
    epss_percentile NUMERIC(6,5),
    epss_model_version DATE,

    -- Risk bonuses applied (each capped at 1 by CHECK constraints)
    kev_bonus NUMERIC(4,2) NOT NULL DEFAULT 0 CHECK (kev_bonus >= 0 AND kev_bonus <= 1),
    epss_bonus NUMERIC(4,2) NOT NULL DEFAULT 0 CHECK (epss_bonus >= 0 AND epss_bonus <= 1),

    -- Combined risk score (0.0 to 1.0)
    combined_risk_score NUMERIC(4,3) NOT NULL CHECK (combined_risk_score >= 0 AND combined_risk_score <= 1),

    -- Risk signal formula used (versioned for reproducibility)
    formula_version TEXT NOT NULL DEFAULT 'v1',
    formula_params JSONB NOT NULL DEFAULT '{}',

    -- Determinism: hash of all scoring inputs
    input_hash TEXT NOT NULL,

    -- Metadata
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by TEXT,

    UNIQUE(tenant_id, vulnerability_id, input_hash)
);

CREATE INDEX idx_risk_scores_tenant ON policy.risk_scores(tenant_id);
CREATE INDEX idx_risk_scores_vuln ON policy.risk_scores(tenant_id, vulnerability_id);
CREATE INDEX idx_risk_scores_combined ON policy.risk_scores(tenant_id, combined_risk_score DESC);
-- Partial indexes: only KEV-flagged / EPSS-bearing rows are indexed.
CREATE INDEX idx_risk_scores_kev ON policy.risk_scores(kev_flag) WHERE kev_flag = TRUE;
CREATE INDEX idx_risk_scores_epss ON policy.risk_scores(epss_percentile DESC) WHERE epss_percentile IS NOT NULL;
CREATE INDEX idx_risk_scores_created ON policy.risk_scores(tenant_id, created_at DESC);
|
||||
|
||||
-- EPSS bonus thresholds configuration table
-- Per-tenant named configurations; thresholds is an ordered JSON array of
-- {percentile, bonus} steps evaluated by the risk formula.
CREATE TABLE IF NOT EXISTS policy.epss_thresholds (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    name TEXT NOT NULL,
    is_default BOOLEAN NOT NULL DEFAULT FALSE,
    thresholds JSONB NOT NULL DEFAULT '[
{"percentile": 0.99, "bonus": 0.10},
{"percentile": 0.90, "bonus": 0.05},
{"percentile": 0.50, "bonus": 0.02}
]'::jsonb,
    kev_bonus NUMERIC(4,2) NOT NULL DEFAULT 0.20,
    description TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by TEXT,
    UNIQUE(tenant_id, name)
);

CREATE INDEX idx_epss_thresholds_tenant ON policy.epss_thresholds(tenant_id);
-- Partial index: fast lookup of a tenant's default configuration.
CREATE INDEX idx_epss_thresholds_default ON policy.epss_thresholds(tenant_id, is_default)
WHERE is_default = TRUE;

-- Risk score history (audit trail)
-- Append-only snapshots of prior risk_scores values.
CREATE TABLE IF NOT EXISTS policy.risk_score_history (
    id BIGSERIAL PRIMARY KEY,
    risk_score_id UUID NOT NULL REFERENCES policy.risk_scores(id),
    cvss_score NUMERIC(4,1) NOT NULL,
    kev_flag BOOLEAN NOT NULL,
    epss_score NUMERIC(6,5),
    epss_percentile NUMERIC(6,5),
    combined_risk_score NUMERIC(4,3) NOT NULL,
    changed_by TEXT,
    changed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    change_reason TEXT
);

CREATE INDEX idx_risk_score_history_score ON policy.risk_score_history(risk_score_id);
CREATE INDEX idx_risk_score_history_changed ON policy.risk_score_history(changed_at);
|
||||
|
||||
-- Trigger for risk_scores updated_at
-- Relies on policy.update_updated_at(), defined in an earlier migration.
CREATE TRIGGER trg_risk_scores_updated_at
BEFORE UPDATE ON policy.risk_scores
FOR EACH ROW EXECUTE FUNCTION policy.update_updated_at();

-- Trigger for epss_thresholds updated_at
CREATE TRIGGER trg_epss_thresholds_updated_at
BEFORE UPDATE ON policy.epss_thresholds
FOR EACH ROW EXECUTE FUNCTION policy.update_updated_at();

-- Insert default EPSS threshold configuration
-- Uses the all-zero UUID as a sentinel tenant for the global default;
-- ON CONFLICT DO NOTHING keeps the insert idempotent on reruns.
INSERT INTO policy.epss_thresholds (tenant_id, name, is_default, thresholds, kev_bonus, description)
VALUES (
    '00000000-0000-0000-0000-000000000000'::uuid,
    'default',
    TRUE,
    '[
{"percentile": 0.99, "bonus": 0.10, "description": "Top 1% most likely to be exploited"},
{"percentile": 0.90, "bonus": 0.05, "description": "Top 10% exploitation probability"},
{"percentile": 0.50, "bonus": 0.02, "description": "Above median exploitation probability"}
]'::jsonb,
    0.20,
    'Default EPSS bonus thresholds per StellaOps standard risk formula'
) ON CONFLICT DO NOTHING;
|
||||
|
||||
-- View for current EPSS scores (latest model version)
-- DISTINCT ON + ORDER BY model_version DESC picks the newest non-expired row
-- per CVE.
CREATE OR REPLACE VIEW policy.epss_current AS
SELECT DISTINCT ON (cve_id)
    cve_id,
    score,
    percentile,
    model_version,
    fetched_at
FROM policy.epss_scores
WHERE expires_at > NOW()
ORDER BY cve_id, model_version DESC;

-- View for high-risk vulnerabilities (KEV or high EPSS)
-- risk_category labels the dominant reason; KEV membership takes precedence
-- over EPSS percentile in the CASE ordering.
CREATE OR REPLACE VIEW policy.high_risk_vulns AS
SELECT
    rs.tenant_id,
    rs.vulnerability_id,
    rs.cvss_score,
    rs.cvss_version,
    rs.kev_flag,
    rs.epss_percentile,
    rs.combined_risk_score,
    CASE
        WHEN rs.kev_flag THEN 'KEV'
        WHEN rs.epss_percentile >= 0.95 THEN 'High EPSS (95th+)'
        WHEN rs.epss_percentile >= 0.90 THEN 'High EPSS (90th+)'
        ELSE 'CVSS Only'
    END AS risk_category
FROM policy.risk_scores rs
WHERE rs.kev_flag = TRUE
    OR rs.epss_percentile >= 0.90
    OR rs.combined_risk_score >= 0.90;

COMMENT ON TABLE policy.epss_scores IS 'Cached EPSS scores from FIRST.org for CVE exploitation probability';
COMMENT ON TABLE policy.risk_scores IS 'Combined risk scores using CVSS + KEV + EPSS formula';
COMMENT ON TABLE policy.epss_thresholds IS 'Configurable EPSS bonus thresholds for risk calculation';
COMMENT ON VIEW policy.epss_current IS 'Current (non-expired) EPSS scores per CVE';
COMMENT ON VIEW policy.high_risk_vulns IS 'Vulnerabilities flagged as high-risk due to KEV or high EPSS';
|
||||
@@ -0,0 +1,195 @@
|
||||
-- Policy Schema Migration 005: CVSS Multi-Version Enhancements
|
||||
-- Adds views and indexes for multi-version CVSS support (v2.0, v3.0, v3.1, v4.0)
|
||||
|
||||
-- Add version-specific columns for temporal and environmental scores (v2/v3)
|
||||
-- Note: base_metrics, threat_metrics, environmental_metrics already support JSONB storage
|
||||
|
||||
-- Add index for CVSS version filtering
|
||||
CREATE INDEX IF NOT EXISTS idx_cvss_receipts_version
|
||||
ON policy.cvss_receipts(cvss_version);
|
||||
|
||||
-- Add index for severity filtering
|
||||
CREATE INDEX IF NOT EXISTS idx_cvss_receipts_severity
|
||||
ON policy.cvss_receipts(tenant_id, severity);
|
||||
|
||||
-- Add composite index for version + severity queries
|
||||
CREATE INDEX IF NOT EXISTS idx_cvss_receipts_version_severity
|
||||
ON policy.cvss_receipts(tenant_id, cvss_version, severity);
|
||||
|
||||
-- View for CVSS v2 receipts with metrics unpacked
|
||||
CREATE OR REPLACE VIEW policy.cvss_v2_receipts AS
|
||||
SELECT
|
||||
id,
|
||||
tenant_id,
|
||||
vulnerability_id,
|
||||
vector,
|
||||
severity,
|
||||
base_score,
|
||||
-- V2-specific: temporal_score stored in threat_score column
|
||||
threat_score AS temporal_score,
|
||||
environmental_score,
|
||||
effective_score,
|
||||
-- Extract v2 base metrics
|
||||
base_metrics->>'accessVector' AS access_vector,
|
||||
base_metrics->>'accessComplexity' AS access_complexity,
|
||||
base_metrics->>'authentication' AS authentication,
|
||||
base_metrics->>'confidentialityImpact' AS confidentiality_impact,
|
||||
base_metrics->>'integrityImpact' AS integrity_impact,
|
||||
base_metrics->>'availabilityImpact' AS availability_impact,
|
||||
-- Extract v2 temporal metrics (if present)
|
||||
threat_metrics->>'exploitability' AS exploitability,
|
||||
threat_metrics->>'remediationLevel' AS remediation_level,
|
||||
threat_metrics->>'reportConfidence' AS report_confidence,
|
||||
input_hash,
|
||||
created_at,
|
||||
is_active
|
||||
FROM policy.cvss_receipts
|
||||
WHERE cvss_version = '2.0';
|
||||
|
||||
-- View for CVSS v3.x receipts with metrics unpacked
|
||||
CREATE OR REPLACE VIEW policy.cvss_v3_receipts AS
|
||||
SELECT
|
||||
id,
|
||||
tenant_id,
|
||||
vulnerability_id,
|
||||
vector,
|
||||
cvss_version,
|
||||
severity,
|
||||
base_score,
|
||||
threat_score AS temporal_score,
|
||||
environmental_score,
|
||||
effective_score,
|
||||
-- Extract v3 base metrics
|
||||
base_metrics->>'attackVector' AS attack_vector,
|
||||
base_metrics->>'attackComplexity' AS attack_complexity,
|
||||
base_metrics->>'privilegesRequired' AS privileges_required,
|
||||
base_metrics->>'userInteraction' AS user_interaction,
|
||||
base_metrics->>'scope' AS scope,
|
||||
base_metrics->>'confidentialityImpact' AS confidentiality_impact,
|
||||
base_metrics->>'integrityImpact' AS integrity_impact,
|
||||
base_metrics->>'availabilityImpact' AS availability_impact,
|
||||
-- Extract v3 temporal metrics (if present)
|
||||
threat_metrics->>'exploitCodeMaturity' AS exploit_code_maturity,
|
||||
threat_metrics->>'remediationLevel' AS remediation_level,
|
||||
threat_metrics->>'reportConfidence' AS report_confidence,
|
||||
input_hash,
|
||||
created_at,
|
||||
is_active
|
||||
FROM policy.cvss_receipts
|
||||
WHERE cvss_version IN ('3.0', '3.1');
|
||||
|
||||
-- View for CVSS v4 receipts with metrics unpacked
|
||||
CREATE OR REPLACE VIEW policy.cvss_v4_receipts AS
|
||||
SELECT
|
||||
id,
|
||||
tenant_id,
|
||||
vulnerability_id,
|
||||
vector,
|
||||
severity,
|
||||
base_score,
|
||||
threat_score,
|
||||
environmental_score,
|
||||
full_score,
|
||||
effective_score,
|
||||
effective_score_type,
|
||||
-- Extract v4 base metrics
|
||||
base_metrics->>'attackVector' AS attack_vector,
|
||||
base_metrics->>'attackComplexity' AS attack_complexity,
|
||||
base_metrics->>'attackRequirements' AS attack_requirements,
|
||||
base_metrics->>'privilegesRequired' AS privileges_required,
|
||||
base_metrics->>'userInteraction' AS user_interaction,
|
||||
base_metrics->>'vulnConfidentialityImpact' AS vuln_confidentiality,
|
||||
base_metrics->>'vulnIntegrityImpact' AS vuln_integrity,
|
||||
base_metrics->>'vulnAvailabilityImpact' AS vuln_availability,
|
||||
base_metrics->>'subConfidentialityImpact' AS sub_confidentiality,
|
||||
base_metrics->>'subIntegrityImpact' AS sub_integrity,
|
||||
base_metrics->>'subAvailabilityImpact' AS sub_availability,
|
||||
-- Extract v4 threat metrics
|
||||
threat_metrics->>'exploitMaturity' AS exploit_maturity,
|
||||
-- Extract v4 supplemental metrics
|
||||
supplemental_metrics->>'safety' AS safety,
|
||||
supplemental_metrics->>'automatable' AS automatable,
|
||||
supplemental_metrics->>'recovery' AS recovery,
|
||||
supplemental_metrics->>'valueDensity' AS value_density,
|
||||
supplemental_metrics->>'responseEffort' AS response_effort,
|
||||
supplemental_metrics->>'providerUrgency' AS provider_urgency,
|
||||
input_hash,
|
||||
created_at,
|
||||
is_active
|
||||
FROM policy.cvss_receipts
|
||||
WHERE cvss_version = '4.0';
|
||||
|
||||
-- Summary view by CVSS version
|
||||
CREATE OR REPLACE VIEW policy.cvss_version_summary AS
|
||||
SELECT
|
||||
tenant_id,
|
||||
cvss_version,
|
||||
COUNT(*) AS total_receipts,
|
||||
COUNT(*) FILTER (WHERE is_active) AS active_receipts,
|
||||
ROUND(AVG(base_score)::numeric, 1) AS avg_base_score,
|
||||
ROUND(AVG(effective_score)::numeric, 1) AS avg_effective_score,
|
||||
COUNT(*) FILTER (WHERE severity = 'Critical') AS critical_count,
|
||||
COUNT(*) FILTER (WHERE severity = 'High') AS high_count,
|
||||
COUNT(*) FILTER (WHERE severity = 'Medium') AS medium_count,
|
||||
COUNT(*) FILTER (WHERE severity = 'Low') AS low_count,
|
||||
COUNT(*) FILTER (WHERE severity = 'None') AS none_count
|
||||
FROM policy.cvss_receipts
|
||||
GROUP BY tenant_id, cvss_version;
|
||||
|
||||
-- Function to get severity from score (version-aware)
-- Maps a numeric CVSS score to its qualitative severity label using the
-- threshold table appropriate for the given CVSS version:
--   * v2.0 has only High/Medium/Low/None (no 'Critical' band).
--   * v3.0/v3.1/v4.0 share the standard five-band thresholds.
-- Behavior notes:
--   * A NULL p_score makes every comparison NULL (not true), so the CASE
--     falls through to 'None'.
--   * Any version string other than '2.0' is treated as v3/v4-style; there
--     is no error path for unknown versions.
CREATE OR REPLACE FUNCTION policy.cvss_severity(
    p_score NUMERIC,   -- CVSS score, expected range 0.0-10.0
    p_version TEXT     -- '2.0', '3.0', '3.1', or '4.0'
) RETURNS TEXT AS $$
BEGIN
    -- V2 uses different thresholds than v3/v4
    IF p_version = '2.0' THEN
        RETURN CASE
            WHEN p_score >= 7.0 THEN 'High'
            WHEN p_score >= 4.0 THEN 'Medium'
            WHEN p_score > 0 THEN 'Low'
            ELSE 'None'
        END;
    ELSE
        -- V3.x and V4.0 use the same thresholds
        RETURN CASE
            WHEN p_score >= 9.0 THEN 'Critical'
            WHEN p_score >= 7.0 THEN 'High'
            WHEN p_score >= 4.0 THEN 'Medium'
            WHEN p_score >= 0.1 THEN 'Low'
            ELSE 'None'
        END;
    END IF;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
-- Function to validate CVSS vector format
-- Returns TRUE when p_vector matches the expected base-metric layout for the
-- given version, FALSE otherwise (including for unknown versions).
-- Behavior notes:
--   * The regexes are anchored only at the start (no trailing '$'), so they
--     validate the mandatory base-metric prefix and deliberately allow
--     temporal/environmental/threat metrics to follow.
--   * v2.0 accepts both the bare vector and the 'CVSS2#' prefixed form.
--   * A NULL p_vector yields NULL from '~', which callers should treat as
--     not-valid.
CREATE OR REPLACE FUNCTION policy.validate_cvss_vector(
    p_vector TEXT,    -- CVSS vector string to validate
    p_version TEXT    -- '2.0', '3.0', '3.1', or '4.0'
) RETURNS BOOLEAN AS $$
BEGIN
    CASE p_version
        WHEN '2.0' THEN
            RETURN p_vector ~ '^(CVSS2#)?AV:[LAN]/AC:[HML]/Au:[MSN]/C:[NPC]/I:[NPC]/A:[NPC]';
        WHEN '3.0', '3.1' THEN
            RETURN p_vector ~ '^CVSS:3\.[01]/AV:[NALP]/AC:[LH]/PR:[NLH]/UI:[NR]/S:[UC]/C:[NLH]/I:[NLH]/A:[NLH]';
        WHEN '4.0' THEN
            RETURN p_vector ~ '^CVSS:4\.0/AV:[NALP]/AC:[LH]/AT:[NP]/PR:[NLH]/UI:[NAP]/VC:[NLH]/VI:[NLH]/VA:[NLH]/SC:[NLH]/SI:[NLH]/SA:[NLH]';
        ELSE
            RETURN FALSE;
    END CASE;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
-- Add check constraint for vector format validation (optional - can be expensive)
|
||||
-- ALTER TABLE policy.cvss_receipts
|
||||
-- ADD CONSTRAINT cvss_receipts_vector_format_check
|
||||
-- CHECK (policy.validate_cvss_vector(vector, cvss_version));
|
||||
|
||||
COMMENT ON VIEW policy.cvss_v2_receipts IS 'CVSS v2.0 receipts with metrics unpacked from JSONB';
|
||||
COMMENT ON VIEW policy.cvss_v3_receipts IS 'CVSS v3.0/v3.1 receipts with metrics unpacked from JSONB';
|
||||
COMMENT ON VIEW policy.cvss_v4_receipts IS 'CVSS v4.0 receipts with metrics unpacked from JSONB';
|
||||
COMMENT ON VIEW policy.cvss_version_summary IS 'Summary statistics grouped by CVSS version';
|
||||
COMMENT ON FUNCTION policy.cvss_severity IS 'Returns severity string from score using version-appropriate thresholds';
|
||||
COMMENT ON FUNCTION policy.validate_cvss_vector IS 'Validates CVSS vector string format for specified version';
|
||||
@@ -0,0 +1,154 @@
|
||||
-- Policy Schema Migration 006: Row-Level Security
|
||||
-- Sprint: SPRINT_3421_0001_0001 - RLS Expansion
|
||||
-- Category: B (release migration, requires coordination)
|
||||
--
|
||||
-- Purpose: Enable Row-Level Security on all tenant-scoped tables in the policy
|
||||
-- schema to provide database-level tenant isolation as defense-in-depth.
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 1: Create helper schema and function for tenant context
|
||||
-- ============================================================================
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS policy_app;
|
||||
|
||||
-- Tenant context helper function
-- Returns the tenant id from the 'app.tenant_id' session variable, raising
-- P0001 when it is unset or empty. Used by every RLS policy in this
-- migration, so a missing tenant context fails closed (no rows visible).
-- STABLE: the session variable cannot change within a single statement.
-- SECURITY DEFINER: runs with the function owner's rights; EXECUTE is
-- revoked from PUBLIC below and granted explicitly to app roles.
CREATE OR REPLACE FUNCTION policy_app.require_current_tenant()
RETURNS TEXT
LANGUAGE plpgsql STABLE SECURITY DEFINER
AS $$
DECLARE
    v_tenant TEXT;
BEGIN
    -- second argument 'true' = missing_ok: returns NULL instead of raising
    -- when the setting has never been set in this session.
    v_tenant := current_setting('app.tenant_id', true);
    IF v_tenant IS NULL OR v_tenant = '' THEN
        RAISE EXCEPTION 'app.tenant_id session variable not set'
            USING HINT = 'Set via: SELECT set_config(''app.tenant_id'', ''<tenant>'', false)',
                  ERRCODE = 'P0001';
    END IF;
    RETURN v_tenant;
END;
$$;
|
||||
|
||||
REVOKE ALL ON FUNCTION policy_app.require_current_tenant() FROM PUBLIC;
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 2: Enable RLS on tables with direct tenant_id column
|
||||
-- ============================================================================
|
||||
|
||||
-- policy.packs
|
||||
ALTER TABLE policy.packs ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.packs FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS packs_tenant_isolation ON policy.packs;
|
||||
CREATE POLICY packs_tenant_isolation ON policy.packs
|
||||
FOR ALL
|
||||
USING (tenant_id = policy_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = policy_app.require_current_tenant());
|
||||
|
||||
-- policy.risk_profiles
|
||||
ALTER TABLE policy.risk_profiles ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.risk_profiles FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS risk_profiles_tenant_isolation ON policy.risk_profiles;
|
||||
CREATE POLICY risk_profiles_tenant_isolation ON policy.risk_profiles
|
||||
FOR ALL
|
||||
USING (tenant_id = policy_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = policy_app.require_current_tenant());
|
||||
|
||||
-- policy.evaluation_runs
|
||||
ALTER TABLE policy.evaluation_runs ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.evaluation_runs FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS evaluation_runs_tenant_isolation ON policy.evaluation_runs;
|
||||
CREATE POLICY evaluation_runs_tenant_isolation ON policy.evaluation_runs
|
||||
FOR ALL
|
||||
USING (tenant_id = policy_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = policy_app.require_current_tenant());
|
||||
|
||||
-- policy.exceptions
|
||||
ALTER TABLE policy.exceptions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.exceptions FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS exceptions_tenant_isolation ON policy.exceptions;
|
||||
CREATE POLICY exceptions_tenant_isolation ON policy.exceptions
|
||||
FOR ALL
|
||||
USING (tenant_id = policy_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = policy_app.require_current_tenant());
|
||||
|
||||
-- policy.audit
|
||||
ALTER TABLE policy.audit ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.audit FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS audit_tenant_isolation ON policy.audit;
|
||||
CREATE POLICY audit_tenant_isolation ON policy.audit
|
||||
FOR ALL
|
||||
USING (tenant_id = policy_app.require_current_tenant())
|
||||
WITH CHECK (tenant_id = policy_app.require_current_tenant());
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 3: FK-based RLS for child tables (inherit tenant from parent)
|
||||
-- ============================================================================
|
||||
|
||||
-- policy.pack_versions inherits tenant from policy.packs
|
||||
ALTER TABLE policy.pack_versions ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.pack_versions FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS pack_versions_tenant_isolation ON policy.pack_versions;
|
||||
CREATE POLICY pack_versions_tenant_isolation ON policy.pack_versions
|
||||
FOR ALL
|
||||
USING (
|
||||
pack_id IN (
|
||||
SELECT id FROM policy.packs
|
||||
WHERE tenant_id = policy_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- policy.rules inherits tenant from policy.pack_versions -> policy.packs
|
||||
ALTER TABLE policy.rules ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.rules FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS rules_tenant_isolation ON policy.rules;
|
||||
CREATE POLICY rules_tenant_isolation ON policy.rules
|
||||
FOR ALL
|
||||
USING (
|
||||
pack_version_id IN (
|
||||
SELECT pv.id FROM policy.pack_versions pv
|
||||
JOIN policy.packs p ON pv.pack_id = p.id
|
||||
WHERE p.tenant_id = policy_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- policy.risk_profile_history inherits tenant from policy.risk_profiles
|
||||
ALTER TABLE policy.risk_profile_history ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.risk_profile_history FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS risk_profile_history_tenant_isolation ON policy.risk_profile_history;
|
||||
CREATE POLICY risk_profile_history_tenant_isolation ON policy.risk_profile_history
|
||||
FOR ALL
|
||||
USING (
|
||||
risk_profile_id IN (
|
||||
SELECT id FROM policy.risk_profiles
|
||||
WHERE tenant_id = policy_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- policy.explanations inherits tenant from policy.evaluation_runs
|
||||
ALTER TABLE policy.explanations ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE policy.explanations FORCE ROW LEVEL SECURITY;
|
||||
DROP POLICY IF EXISTS explanations_tenant_isolation ON policy.explanations;
|
||||
CREATE POLICY explanations_tenant_isolation ON policy.explanations
|
||||
FOR ALL
|
||||
USING (
|
||||
evaluation_run_id IN (
|
||||
SELECT id FROM policy.evaluation_runs
|
||||
WHERE tenant_id = policy_app.require_current_tenant()
|
||||
)
|
||||
);
|
||||
|
||||
-- ============================================================================
|
||||
-- Step 4: Create admin bypass role
|
||||
-- ============================================================================
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = 'policy_admin') THEN
|
||||
CREATE ROLE policy_admin WITH NOLOGIN BYPASSRLS;
|
||||
END IF;
|
||||
END
|
||||
$$;
|
||||
|
||||
COMMIT;
|
||||
@@ -0,0 +1,55 @@
|
||||
namespace StellaOps.Policy.Scoring;
|
||||
|
||||
/// <summary>
/// Calculates freshness multiplier for evidence based on age.
/// Uses basis-point math for determinism (no floating point).
/// Buckets are scanned in declared order, so the configuration is expected
/// to list them with ascending <c>MaxAgeDays</c> (as the default does).
/// </summary>
public sealed class EvidenceFreshnessCalculator
{
    private readonly FreshnessMultiplierConfig _config;

    /// <summary>
    /// Creates a calculator using <paramref name="config"/>, or the advisory
    /// default configuration when none is supplied.
    /// </summary>
    /// <param name="config">Bucket configuration; must declare at least one bucket.</param>
    /// <exception cref="ArgumentException">The supplied configuration has no buckets.</exception>
    public EvidenceFreshnessCalculator(FreshnessMultiplierConfig? config = null)
    {
        _config = config ?? FreshnessMultiplierConfig.Default;

        // Fail fast: both lookup paths below index into Buckets, so an empty
        // list would otherwise surface later as an IndexOutOfRangeException.
        if (_config.Buckets.Count == 0)
        {
            throw new ArgumentException("Freshness configuration must define at least one bucket.", nameof(config));
        }
    }

    /// <summary>
    /// Calculates the freshness multiplier for evidence collected at a given timestamp.
    /// </summary>
    /// <param name="evidenceTimestamp">When the evidence was collected.</param>
    /// <param name="asOf">Reference time for freshness calculation (explicit, no implicit time).</param>
    /// <returns>Multiplier in basis points (10000 = 100%).</returns>
    public int CalculateMultiplierBps(DateTimeOffset evidenceTimestamp, DateTimeOffset asOf)
    {
        if (evidenceTimestamp > asOf)
        {
            // Future-dated evidence (clock skew) gets the first bucket,
            // i.e. maximum freshness.
            return _config.Buckets[0].MultiplierBps;
        }

        // Truncate toward zero: evidence 7.9 days old counts as 7 days,
        // keeping each bucket's upper bound inclusive.
        var ageDays = (int)(asOf - evidenceTimestamp).TotalDays;

        foreach (var bucket in _config.Buckets)
        {
            if (ageDays <= bucket.MaxAgeDays)
            {
                return bucket.MultiplierBps;
            }
        }

        return _config.Buckets[^1].MultiplierBps; // Fallback to oldest bucket
    }

    /// <summary>
    /// Applies freshness multiplier to a base score.
    /// </summary>
    /// <param name="baseScore">Score in range 0-100.</param>
    /// <param name="evidenceTimestamp">When the evidence was collected.</param>
    /// <param name="asOf">Reference time for freshness calculation.</param>
    /// <returns>Adjusted score (integer, no floating point).</returns>
    public int ApplyFreshness(int baseScore, DateTimeOffset evidenceTimestamp, DateTimeOffset asOf)
    {
        var multiplierBps = CalculateMultiplierBps(evidenceTimestamp, asOf);
        return (baseScore * multiplierBps) / 10000;
    }
}
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
namespace StellaOps.Policy.Scoring;
|
||||
|
||||
/// <summary>
/// Defines a freshness bucket for evidence age-based scoring decay.
/// </summary>
/// <param name="MaxAgeDays">Maximum age in days for this bucket (inclusive upper bound).</param>
/// <param name="MultiplierBps">Multiplier in basis points (10000 = 100%).</param>
public sealed record FreshnessBucket(int MaxAgeDays, int MultiplierBps);

/// <summary>
/// Configuration for evidence freshness multipliers.
/// Default buckets per determinism advisory: 7d=10000, 30d=9000, 90d=7500, 180d=6000, 365d=4000, >365d=2000.
/// </summary>
public sealed record FreshnessMultiplierConfig
{
    // Backing store for the shared default; listed in ascending age order so
    // consumers can scan the buckets front-to-back.
    private static readonly FreshnessBucket[] s_defaultBuckets =
    {
        new(7, 10000),
        new(30, 9000),
        new(90, 7500),
        new(180, 6000),
        new(365, 4000),
        new(int.MaxValue, 2000),
    };

    /// <summary>Ordered freshness buckets (ascending <c>MaxAgeDays</c>).</summary>
    public required IReadOnlyList<FreshnessBucket> Buckets { get; init; }

    /// <summary>Shared default configuration per the determinism advisory.</summary>
    public static FreshnessMultiplierConfig Default { get; } = new() { Buckets = s_defaultBuckets };
}
|
||||
|
||||
@@ -0,0 +1,84 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Scoring;
|
||||
|
||||
/// <summary>
/// Structured explanation of a factor's contribution to the final score.
/// </summary>
/// <param name="Factor">Factor identifier (e.g., "reachability", "evidence", "provenance").</param>
/// <param name="Value">Computed value for this factor (0-100 range).</param>
/// <param name="Reason">Human-readable explanation of how the value was computed.</param>
/// <param name="ContributingDigests">Optional digests of objects that contributed to this factor.</param>
public sealed record ScoreExplanation(
    string Factor,
    int Value,
    string Reason,
    IReadOnlyList<string>? ContributingDigests = null);

/// <summary>
/// Builder for accumulating score explanations during scoring pipeline.
/// </summary>
public sealed class ScoreExplainBuilder
{
    private readonly List<ScoreExplanation> _entries = [];

    /// <summary>
    /// Records a factor contribution. Digests are trimmed, blank entries are
    /// dropped, and the remainder is ordinal-sorted for deterministic output.
    /// </summary>
    /// <param name="factor">Factor identifier; must be non-blank.</param>
    /// <param name="value">Computed value for this factor.</param>
    /// <param name="reason">Human-readable explanation; must be non-blank.</param>
    /// <param name="digests">Optional contributing object digests.</param>
    /// <returns>This builder, for chaining.</returns>
    public ScoreExplainBuilder Add(string factor, int value, string reason, IReadOnlyList<string>? digests = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(factor);
        ArgumentException.ThrowIfNullOrWhiteSpace(reason);

        // Null or empty input stays null; a non-empty list is normalized.
        IReadOnlyList<string>? normalized = digests is { Count: > 0 }
            ? digests
                .Where(d => !string.IsNullOrWhiteSpace(d))
                .Select(d => d.Trim())
                .OrderBy(d => d, StringComparer.Ordinal)
                .ToImmutableArray()
            : null;

        _entries.Add(new ScoreExplanation(factor.Trim(), value, reason, normalized));
        return this;
    }

    /// <summary>Adds a reachability factor with a hop-count-derived reason.</summary>
    public ScoreExplainBuilder AddReachability(int hops, int score, string entrypoint) =>
        Add("reachability", score, hops switch
        {
            0 => $"Direct entry point: {entrypoint}",
            <= 2 => $"{hops} hops from {entrypoint}",
            _ => $"{hops} hops from nearest entry point"
        });

    /// <summary>
    /// Adds an evidence factor; the stored value is <paramref name="points"/>
    /// scaled by the freshness multiplier (basis points, 10000 = 100%).
    /// </summary>
    public ScoreExplainBuilder AddEvidence(int points, int freshnessMultiplierBps, int ageDays)
    {
        // Integer division: bps -> whole percent for the human-readable reason.
        var percent = freshnessMultiplierBps / 100;
        return Add(
            "evidence",
            (points * freshnessMultiplierBps) / 10000,
            $"{points} evidence points, {ageDays} days old ({percent}% freshness)");
    }

    /// <summary>Adds a provenance factor for the given provenance level.</summary>
    public ScoreExplainBuilder AddProvenance(string level, int score) =>
        Add("provenance", score, $"Provenance level: {level}");

    /// <summary>Adds a base-severity factor derived from a CVSS score.</summary>
    public ScoreExplainBuilder AddBaseSeverity(decimal cvss, int score) =>
        Add("baseSeverity", score, $"CVSS {cvss:F1} mapped to {score}");

    /// <summary>
    /// Builds the explanation list, sorted by factor name for determinism.
    /// </summary>
    public IReadOnlyList<ScoreExplanation> Build() =>
        _entries
            .OrderBy(e => e.Factor, StringComparer.Ordinal)
            .ThenBy(e => e.ContributingDigests?.FirstOrDefault() ?? "", StringComparer.Ordinal)
            .ToList();
}
|
||||
|
||||
@@ -10,6 +10,10 @@ public static class SplSchemaResource
|
||||
private const string SchemaResourceName = "StellaOps.Policy.Schemas.spl-schema@1.json";
|
||||
private const string SampleResourceName = "StellaOps.Policy.Schemas.spl-sample@1.json";
|
||||
|
||||
public static string GetSchema() => ReadSchemaJson();
|
||||
|
||||
public static string GetSample() => ReadSampleJson();
|
||||
|
||||
public static Stream OpenSchemaStream()
|
||||
{
|
||||
return OpenResourceStream(SchemaResourceName);
|
||||
|
||||
10
src/Policy/__Libraries/StellaOps.Policy/TASKS.md
Normal file
10
src/Policy/__Libraries/StellaOps.Policy/TASKS.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# Policy Library Local Tasks
|
||||
|
||||
This file mirrors sprint work for the `StellaOps.Policy` library.
|
||||
|
||||
| Task ID | Sprint | Status | Notes |
|
||||
| --- | --- | --- | --- |
|
||||
| `DET-3401-001` | `docs/implplan/SPRINT_3401_0001_0001_determinism_scoring_foundations.md` | DONE (2025-12-14) | Added `FreshnessBucket` + `FreshnessMultiplierConfig` in `src/Policy/__Libraries/StellaOps.Policy/Scoring/FreshnessModels.cs` and covered bucket boundaries in `src/Policy/__Tests/StellaOps.Policy.Tests/Scoring/EvidenceFreshnessCalculatorTests.cs`. |
|
||||
| `DET-3401-002` | `docs/implplan/SPRINT_3401_0001_0001_determinism_scoring_foundations.md` | DONE (2025-12-14) | Implemented `EvidenceFreshnessCalculator` in `src/Policy/__Libraries/StellaOps.Policy/Scoring/EvidenceFreshnessCalculator.cs`. |
|
||||
| `DET-3401-009` | `docs/implplan/SPRINT_3401_0001_0001_determinism_scoring_foundations.md` | DONE (2025-12-14) | Added `ScoreExplanation` + `ScoreExplainBuilder` in `src/Policy/__Libraries/StellaOps.Policy/Scoring/ScoreExplanation.cs` and tests in `src/Policy/__Tests/StellaOps.Policy.Tests/Scoring/ScoreExplainBuilderTests.cs`. |
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
using System.Text.Json;
|
||||
using StellaOps.Policy.Engine.Scoring;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tests.Scoring;
|
||||
|
||||
// Tests for RiskScoringResult's Explain property contract:
// it must never be null (defaulting to empty, coercing null inits to empty)
// and must serialize under the camelCase name "explain".
public sealed class RiskScoringResultTests
{
    // Explain was not set at construction; the property should default to
    // an empty, non-null sequence.
    [Fact]
    public void Explain_DefaultsToEmptyArray()
    {
        var result = new RiskScoringResult(
            FindingId: "finding-1",
            ProfileId: "profile-1",
            ProfileVersion: "v1",
            RawScore: 1.23,
            NormalizedScore: 0.42,
            Severity: "high",
            SignalValues: new Dictionary<string, object?>(),
            SignalContributions: new Dictionary<string, double>(),
            OverrideApplied: null,
            OverrideReason: null,
            ScoredAt: DateTimeOffset.UnixEpoch);

        Assert.NotNull(result.Explain);
        Assert.Empty(result.Explain);
    }

    // Explicitly initializing Explain to null (via the null-forgiving
    // operator) must be coerced back to an empty sequence by the property's
    // init accessor rather than stored as null.
    [Fact]
    public void Explain_NullInitCoercesToEmptyArray()
    {
        var result = new RiskScoringResult(
            FindingId: "finding-1",
            ProfileId: "profile-1",
            ProfileVersion: "v1",
            RawScore: 1.23,
            NormalizedScore: 0.42,
            Severity: "high",
            SignalValues: new Dictionary<string, object?>(),
            SignalContributions: new Dictionary<string, double>(),
            OverrideApplied: null,
            OverrideReason: null,
            ScoredAt: DateTimeOffset.UnixEpoch)
        {
            Explain = null!
        };

        Assert.NotNull(result.Explain);
        Assert.Empty(result.Explain);
    }

    // Web defaults (camelCase) serialization must emit the explanations under
    // "explain" with each ScoreExplanation's fields camelCased (e.g. "factor").
    [Fact]
    public void JsonSerialization_IncludesExplain()
    {
        var result = new RiskScoringResult(
            FindingId: "finding-1",
            ProfileId: "profile-1",
            ProfileVersion: "v1",
            RawScore: 1.23,
            NormalizedScore: 0.42,
            Severity: "high",
            SignalValues: new Dictionary<string, object?>(),
            SignalContributions: new Dictionary<string, double>(),
            OverrideApplied: null,
            OverrideReason: null,
            ScoredAt: DateTimeOffset.UnixEpoch)
        {
            Explain = new[]
            {
                new ScoreExplanation("evidence", 60, "runtime evidence", new[] { "sha256:abc" })
            }
        };

        var json = JsonSerializer.Serialize(result, new JsonSerializerOptions(JsonSerializerDefaults.Web));

        Assert.Contains("\"explain\":[", json);
        Assert.Contains("\"factor\":\"evidence\"", json);
    }
}
|
||||
|
||||
@@ -0,0 +1,327 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for CVSS v2, v3, and multi-version engine factory.
|
||||
/// </summary>
|
||||
public sealed class CvssMultiVersionEngineTests
|
||||
{
|
||||
#region CVSS v2 Tests
|
||||
|
||||
[Fact]
|
||||
public void CvssV2_ComputeFromVector_HighSeverity_ReturnsCorrectScore()
|
||||
{
|
||||
// Arrange - CVE-2002-0392 Apache Chunked-Encoding
|
||||
var engine = new CvssV2Engine();
|
||||
var vector = "AV:N/AC:L/Au:N/C:C/I:C/A:C";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.Version.Should().Be(CvssVersion.V2);
|
||||
result.BaseScore.Should().Be(10.0);
|
||||
result.Severity.Should().Be("High");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV2_ComputeFromVector_MediumSeverity_ReturnsCorrectScore()
|
||||
{
|
||||
// Arrange
|
||||
var engine = new CvssV2Engine();
|
||||
var vector = "AV:N/AC:M/Au:S/C:P/I:P/A:N";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.Version.Should().Be(CvssVersion.V2);
|
||||
result.BaseScore.Should().BeInRange(4.0, 7.0);
|
||||
result.Severity.Should().Be("Medium");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV2_ComputeFromVector_WithTemporal_ReducesScore()
|
||||
{
|
||||
// Arrange
|
||||
var engine = new CvssV2Engine();
|
||||
var baseVector = "AV:N/AC:L/Au:N/C:C/I:C/A:C";
|
||||
var temporalVector = "AV:N/AC:L/Au:N/C:C/I:C/A:C/E:POC/RL:OF/RC:C";
|
||||
|
||||
// Act
|
||||
var baseResult = engine.ComputeFromVector(baseVector);
|
||||
var temporalResult = engine.ComputeFromVector(temporalVector);
|
||||
|
||||
// Assert
|
||||
temporalResult.TemporalScore.Should().NotBeNull();
|
||||
temporalResult.TemporalScore.Should().BeLessThan(baseResult.BaseScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV2_IsValidVector_ValidVector_ReturnsTrue()
|
||||
{
|
||||
var engine = new CvssV2Engine();
|
||||
engine.IsValidVector("AV:N/AC:L/Au:N/C:C/I:C/A:C").Should().BeTrue();
|
||||
engine.IsValidVector("CVSS2#AV:N/AC:L/Au:N/C:C/I:C/A:C").Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV2_IsValidVector_InvalidVector_ReturnsFalse()
|
||||
{
|
||||
var engine = new CvssV2Engine();
|
||||
engine.IsValidVector("CVSS:3.1/AV:N/AC:L").Should().BeFalse();
|
||||
engine.IsValidVector("invalid").Should().BeFalse();
|
||||
engine.IsValidVector("").Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CVSS v3 Tests
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_ComputeFromVector_CriticalSeverity_ReturnsCorrectScore()
|
||||
{
|
||||
// Arrange - Maximum severity vector
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_1);
|
||||
var vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.Version.Should().Be(CvssVersion.V3_1);
|
||||
result.BaseScore.Should().Be(10.0);
|
||||
result.Severity.Should().Be("Critical");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_ComputeFromVector_HighSeverity_ReturnsCorrectScore()
|
||||
{
|
||||
// Arrange
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_1);
|
||||
var vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.Version.Should().Be(CvssVersion.V3_1);
|
||||
result.BaseScore.Should().BeApproximately(9.8, 0.1);
|
||||
result.Severity.Should().Be("Critical");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_ComputeFromVector_MediumSeverity_ReturnsCorrectScore()
|
||||
{
|
||||
// Arrange
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_1);
|
||||
var vector = "CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:N";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.BaseScore.Should().BeInRange(3.0, 5.0);
|
||||
result.Severity.Should().BeOneOf("Low", "Medium");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_ComputeFromVector_V30_ParsesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_0);
|
||||
var vector = "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H";
|
||||
|
||||
// Act
|
||||
var result = engine.ComputeFromVector(vector);
|
||||
|
||||
// Assert
|
||||
result.Version.Should().Be(CvssVersion.V3_0);
|
||||
result.BaseScore.Should().BeGreaterThan(9.0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_IsValidVector_ValidVector_ReturnsTrue()
|
||||
{
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_1);
|
||||
engine.IsValidVector("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H").Should().BeTrue();
|
||||
engine.IsValidVector("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H").Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CvssV3_IsValidVector_InvalidVector_ReturnsFalse()
|
||||
{
|
||||
var engine = new CvssV3Engine(CvssVersion.V3_1);
|
||||
engine.IsValidVector("CVSS:4.0/AV:N/AC:L").Should().BeFalse();
|
||||
engine.IsValidVector("AV:N/AC:L/Au:N").Should().BeFalse();
|
||||
engine.IsValidVector("").Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
public void CvssV3_ScopeChanged_AffectsScore()
{
    // Identical metrics except for Scope: S:C must score strictly higher
    // than S:U because impact spills beyond the vulnerable component.
    var engine = new CvssV3Engine(CvssVersion.V3_1);

    var unchangedScope = engine.ComputeFromVector("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H");
    var changedScope = engine.ComputeFromVector("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H");

    changedScope.BaseScore.Should().BeGreaterThan(unchangedScope.BaseScore);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Factory Tests
|
||||
|
||||
[Fact]
public void CvssEngineFactory_DetectVersion_V4_DetectsCorrectly()
{
    // The CVSS:4.0 prefix routes detection to v4.0.
    new CvssEngineFactory()
        .DetectVersion("CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H")
        .Should().Be(CvssVersion.V4_0);
}

[Fact]
public void CvssEngineFactory_DetectVersion_V31_DetectsCorrectly()
{
    // The CVSS:3.1 prefix routes detection to v3.1.
    new CvssEngineFactory()
        .DetectVersion("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H")
        .Should().Be(CvssVersion.V3_1);
}

[Fact]
public void CvssEngineFactory_DetectVersion_V30_DetectsCorrectly()
{
    // The CVSS:3.0 prefix routes detection to v3.0.
    new CvssEngineFactory()
        .DetectVersion("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H")
        .Should().Be(CvssVersion.V3_0);
}

[Fact]
public void CvssEngineFactory_DetectVersion_V2_DetectsCorrectly()
{
    // v2 vectors may appear bare or with the NVD-style "CVSS2#" prefix.
    var factory = new CvssEngineFactory();

    factory.DetectVersion("AV:N/AC:L/Au:N/C:C/I:C/A:C").Should().Be(CvssVersion.V2);
    factory.DetectVersion("CVSS2#AV:N/AC:L/Au:N/C:C/I:C/A:C").Should().Be(CvssVersion.V2);
}

[Fact]
public void CvssEngineFactory_DetectVersion_Invalid_ReturnsNull()
{
    // Unrecognizable input yields null rather than throwing.
    var factory = new CvssEngineFactory();

    factory.DetectVersion("invalid").Should().BeNull();
    factory.DetectVersion("").Should().BeNull();
    factory.DetectVersion(null!).Should().BeNull();
}
|
||||
|
||||
[Fact]
public void CvssEngineFactory_Create_V2_ReturnsCorrectEngine()
{
    // An explicitly created engine reports the requested version.
    new CvssEngineFactory().Create(CvssVersion.V2).Version.Should().Be(CvssVersion.V2);
}

[Fact]
public void CvssEngineFactory_Create_V31_ReturnsCorrectEngine()
{
    // An explicitly created engine reports the requested version.
    new CvssEngineFactory().Create(CvssVersion.V3_1).Version.Should().Be(CvssVersion.V3_1);
}

[Fact]
public void CvssEngineFactory_Create_V40_ReturnsCorrectEngine()
{
    // An explicitly created engine reports the requested version.
    new CvssEngineFactory().Create(CvssVersion.V4_0).Version.Should().Be(CvssVersion.V4_0);
}
|
||||
|
||||
[Fact]
public void CvssEngineFactory_ComputeFromVector_AutoDetects()
{
    var factory = new CvssEngineFactory();

    // A bare v2 vector is detected and scored without any prefix.
    var v2Computed = factory.ComputeFromVector("AV:N/AC:L/Au:N/C:C/I:C/A:C");
    v2Computed.Version.Should().Be(CvssVersion.V2);
    v2Computed.BaseScore.Should().Be(10.0);

    // A CVSS:3.1 prefix routes to the v3 engine.
    var v31Computed = factory.ComputeFromVector("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H");
    v31Computed.Version.Should().Be(CvssVersion.V3_1);
    v31Computed.BaseScore.Should().BeGreaterThan(9.0);
}
|
||||
|
||||
[Fact]
public void CvssEngineFactory_ComputeFromVector_InvalidVector_ThrowsException()
{
    // An unparseable vector must surface as ArgumentException, not a silent zero score.
    var factory = new CvssEngineFactory();

    Action act = () => factory.ComputeFromVector("invalid");

    act.Should().Throw<ArgumentException>();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cross-Version Determinism Tests
|
||||
|
||||
[Fact]
public void AllEngines_SameInput_ReturnsDeterministicOutput()
{
    // Scoring the same vector twice must yield identical score and
    // canonical vector string, for both v2 and v3.1.
    var factory = new CvssEngineFactory();

    var vectors = new[]
    {
        "AV:N/AC:L/Au:N/C:C/I:C/A:C",
        "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
    };

    foreach (var vector in vectors)
    {
        var firstRun = factory.ComputeFromVector(vector);
        var secondRun = factory.ComputeFromVector(vector);

        secondRun.BaseScore.Should().Be(firstRun.BaseScore);
        secondRun.VectorString.Should().Be(firstRun.VectorString);
    }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Real-World CVE Vector Tests
|
||||
|
||||
[Theory]
[InlineData("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", 9.8, "Critical")] // Log4Shell style
[InlineData("CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N", 6.1, "Medium")] // XSS style
[InlineData("CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:H", 7.8, "High")] // Local privilege escalation
public void CvssV3_RealWorldVectors_ReturnsExpectedScores(string vector, double expectedScore, string expectedSeverity)
{
    // Published NVD-style vectors must reproduce their known scores within
    // rounding tolerance and carry the matching severity label.
    var computed = new CvssV3Engine(CvssVersion.V3_1).ComputeFromVector(vector);

    computed.BaseScore.Should().BeApproximately(expectedScore, 0.2);
    computed.Severity.Should().Be(expectedSeverity);
}
|
||||
|
||||
[Theory]
[InlineData("AV:N/AC:L/Au:N/C:C/I:C/A:C", 10.0, "High")] // Remote code execution
[InlineData("AV:N/AC:M/Au:N/C:P/I:P/A:P", 6.8, "Medium")] // Moderate network vuln
[InlineData("AV:L/AC:L/Au:N/C:P/I:N/A:N", 2.1, "Low")] // Local info disclosure
public void CvssV2_RealWorldVectors_ReturnsExpectedScores(string vector, double expectedScore, string expectedSeverity)
{
    // Classic v2 vectors must reproduce their known scores within rounding
    // tolerance and carry the matching v2 severity label.
    var computed = new CvssV2Engine().ComputeFromVector(vector);

    computed.BaseScore.Should().BeApproximately(expectedScore, 0.2);
    computed.Severity.Should().Be(expectedSeverity);
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,362 @@
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using StellaOps.Policy.Scoring.Receipts;
|
||||
using StellaOps.Policy.Scoring.Tests.Fakes;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
/// Integration tests for the complete CVSS scoring pipeline: the full flow
/// from metric input through engine scoring to signed-receipt generation.
/// </summary>
public sealed class CvssPipelineIntegrationTests
{
    private readonly CvssEngineFactory _factory = new();
    private readonly ICvssV4Engine _v4Engine = new CvssV4Engine();

    #region Full Pipeline Tests - V4 Receipt

    [Fact]
    public async Task FullPipeline_V4_CreatesReceiptWithDeterministicHash()
    {
        // Arrange
        var store = new InMemoryReceiptRepository();
        var receiptBuilder = new ReceiptBuilder(_v4Engine, store);
        var request = BuildRequest(
            "CVE-2024-12345", "integration-test", CreateTestPolicy(), CreateMaxSeverityBaseMetrics());

        // Act
        var receipt = await receiptBuilder.CreateAsync(request);

        // Assert - a max-severity v4 vector yields a Critical 10.0 receipt
        // carrying a hex-encoded SHA-256 input hash.
        receipt.Should().NotBeNull();
        receipt.VulnerabilityId.Should().Be("CVE-2024-12345");
        receipt.TenantId.Should().Be("test-tenant");
        receipt.VectorString.Should().StartWith("CVSS:4.0/");
        receipt.Scores.BaseScore.Should().Be(10.0);
        receipt.Severity.Should().Be(CvssSeverity.Critical);
        receipt.InputHash.Should().NotBeNullOrEmpty();
        receipt.InputHash.Should().HaveLength(64); // SHA-256 hex digest
    }

    [Fact]
    public async Task FullPipeline_V4_WithThreatMetrics_AdjustsScore()
    {
        // Arrange - two requests sharing base metrics; only one adds an
        // "Unreported" exploit-maturity threat metric.
        var store = new InMemoryReceiptRepository();
        var receiptBuilder = new ReceiptBuilder(_v4Engine, store);
        var policy = CreateTestPolicy();
        var metrics = CreateMaxSeverityBaseMetrics();

        var plainRequest = BuildRequest("CVE-2024-BASE", "test", policy, metrics);
        var threatRequest = BuildRequest(
            "CVE-2024-THREAT", "test", policy, metrics,
            new CvssThreatMetrics { ExploitMaturity = ExploitMaturity.Unreported });

        // Act
        var plainReceipt = await receiptBuilder.CreateAsync(plainRequest);
        var threatReceipt = await receiptBuilder.CreateAsync(threatRequest);

        // Assert - an unreported exploit should pull the threat score below base.
        threatReceipt.Scores.ThreatScore.Should().NotBeNull();
        threatReceipt.Scores.ThreatScore.Should().BeLessThan(plainReceipt.Scores.BaseScore);
    }

    #endregion

    #region Cross-Version Factory Tests

    [Theory]
    [InlineData("AV:N/AC:L/Au:N/C:C/I:C/A:C", CvssVersion.V2, 10.0)]
    [InlineData("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", CvssVersion.V3_1, 10.0)]
    [InlineData("CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H", CvssVersion.V4_0, 10.0)]
    public void CrossVersion_MaxSeverityVectors_AllReturnMaxScore(string vector, CvssVersion expectedVersion, double expectedScore)
    {
        // The factory must auto-detect each version and score its worst-case vector at 10.0.
        var computed = _factory.ComputeFromVector(vector);

        computed.Version.Should().Be(expectedVersion);
        computed.BaseScore.Should().Be(expectedScore);
    }

    [Fact]
    public void CrossVersion_AllVersions_ReturnCorrectSeverityLabels()
    {
        // v2 has no "Critical" band (tops out at "High"); v3.1 and v4.0 label 10.0 "Critical".
        var v2Computed = _factory.ComputeFromVector("AV:N/AC:L/Au:N/C:C/I:C/A:C");
        var v31Computed = _factory.ComputeFromVector("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H");
        var v40Computed = _factory.ComputeFromVector("CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:H/SI:H/SA:H");

        v2Computed.Severity.Should().Be("High"); // V2 max severity is High
        v31Computed.Severity.Should().Be("Critical");
        v40Computed.Severity.Should().Be("Critical");
    }

    #endregion

    #region Determinism Tests

    [Fact]
    public async Task Determinism_SameInput_ProducesSameInputHash()
    {
        // Arrange - two fully independent builder/repository pairs fed
        // byte-identical requests.
        var builderA = new ReceiptBuilder(_v4Engine, new InMemoryReceiptRepository());
        var builderB = new ReceiptBuilder(_v4Engine, new InMemoryReceiptRepository());
        var policy = CreateTestPolicy();
        var metrics = CreateMaxSeverityBaseMetrics();

        var requestA = BuildRequest("CVE-2024-12345", "test", policy, metrics);
        var requestB = BuildRequest("CVE-2024-12345", "test", policy, metrics);

        // Act
        var receiptA = await builderA.CreateAsync(requestA);
        var receiptB = await builderB.CreateAsync(requestB);

        // Assert - identical inputs MUST hash, score, and label identically.
        receiptA.InputHash.Should().Be(receiptB.InputHash);
        receiptA.Scores.BaseScore.Should().Be(receiptB.Scores.BaseScore);
        receiptA.Scores.EffectiveScore.Should().Be(receiptB.Scores.EffectiveScore);
        receiptA.VectorString.Should().Be(receiptB.VectorString);
        receiptA.Severity.Should().Be(receiptB.Severity);
    }

    [Fact]
    public void Determinism_EngineScoring_IsIdempotent()
    {
        // Scoring any vector ten times must produce identical results each time.
        var vectors = new[]
        {
            "AV:N/AC:L/Au:N/C:C/I:C/A:C",
            "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
            "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N"
        };

        foreach (var vector in vectors)
        {
            var runs = Enumerable.Range(0, 10)
                .Select(_ => _factory.ComputeFromVector(vector))
                .ToList();

            var baseline = runs[0];
            foreach (var run in runs.Skip(1))
            {
                run.BaseScore.Should().Be(baseline.BaseScore);
                run.Severity.Should().Be(baseline.Severity);
                run.VectorString.Should().Be(baseline.VectorString);
            }
        }
    }

    #endregion

    #region Version Detection Tests

    [Theory]
    [InlineData("AV:N/AC:L/Au:N/C:C/I:C/A:C", CvssVersion.V2)]
    [InlineData("CVSS2#AV:N/AC:L/Au:N/C:C/I:C/A:C", CvssVersion.V2)]
    [InlineData("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", CvssVersion.V3_0)]
    [InlineData("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", CvssVersion.V3_1)]
    [InlineData("CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N", CvssVersion.V4_0)]
    public void VersionDetection_AllVersions_DetectedCorrectly(string vector, CvssVersion expectedVersion)
        => _factory.DetectVersion(vector).Should().Be(expectedVersion);

    [Theory]
    [InlineData("")]
    [InlineData("invalid")]
    [InlineData("CVSS:5.0/AV:N")]
    [InlineData("random/garbage/string")]
    public void VersionDetection_InvalidVectors_ReturnsNull(string vector)
        => _factory.DetectVersion(vector).Should().BeNull();

    #endregion

    #region Error Handling Tests

    [Fact]
    public void ErrorHandling_InvalidVector_ThrowsArgumentException()
    {
        // An unparseable vector must fail loudly, not score as zero.
        Action act = () => _factory.ComputeFromVector("invalid");

        act.Should().Throw<ArgumentException>();
    }

    [Fact]
    public void ErrorHandling_NullVector_ThrowsException()
    {
        // Null input is rejected with the same exception family as garbage input.
        Action act = () => _factory.ComputeFromVector(null!);

        act.Should().Throw<ArgumentException>();
    }

    #endregion

    #region Real-World CVE Tests

    [Theory]
    [InlineData("CVE-2021-44228", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H", 10.0, "Critical")] // Log4Shell
    [InlineData("CVE-2022-22965", "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", 9.8, "Critical")] // Spring4Shell
    [InlineData("CVE-2014-0160", "AV:N/AC:L/Au:N/C:P/I:N/A:N", 5.0, "Medium")] // Heartbleed (V2)
    public void RealWorldCVE_KnownVulnerabilities_MatchExpectedScores(
        string cveId, string vector, double expectedScore, string expectedSeverity)
    {
        // Published vectors for well-known CVEs must reproduce their NVD scores.
        var computed = _factory.ComputeFromVector(vector);

        computed.BaseScore.Should().BeApproximately(expectedScore, 0.2,
            $"CVE {cveId} should have score ~{expectedScore}");
        computed.Severity.Should().Be(expectedSeverity,
            $"CVE {cveId} should have severity {expectedSeverity}");
    }

    #endregion

    #region Severity Threshold Tests

    [Theory]
    [InlineData(0.0, CvssSeverity.None)]
    [InlineData(0.1, CvssSeverity.Low)]
    [InlineData(3.9, CvssSeverity.Low)]
    [InlineData(4.0, CvssSeverity.Medium)]
    [InlineData(6.9, CvssSeverity.Medium)]
    [InlineData(7.0, CvssSeverity.High)]
    [InlineData(8.9, CvssSeverity.High)]
    [InlineData(9.0, CvssSeverity.Critical)]
    [InlineData(10.0, CvssSeverity.Critical)]
    public void SeverityThresholds_V4_ReturnCorrectSeverity(double score, CvssSeverity expectedSeverity)
    {
        // Exercise every band boundary of the default threshold set.
        var thresholds = new CvssSeverityThresholds();

        _v4Engine.GetSeverity(score, thresholds).Should().Be(expectedSeverity);
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Builds a receipt request with the fixed tenant and timestamp used by
    /// every test so input hashes stay reproducible across runs.
    /// </summary>
    private static CreateReceiptRequest BuildRequest(
        string vulnerabilityId,
        string createdBy,
        CvssPolicy policy,
        CvssBaseMetrics baseMetrics,
        CvssThreatMetrics? threatMetrics = null)
    {
        var createdAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

        // Two initializers so ThreatMetrics is only assigned when supplied,
        // mirroring the original per-test object shapes exactly.
        return threatMetrics is null
            ? new CreateReceiptRequest
            {
                VulnerabilityId = vulnerabilityId,
                TenantId = "test-tenant",
                CreatedBy = createdBy,
                CreatedAt = createdAt,
                Policy = policy,
                BaseMetrics = baseMetrics,
                Evidence = CreateMinimalEvidence()
            }
            : new CreateReceiptRequest
            {
                VulnerabilityId = vulnerabilityId,
                TenantId = "test-tenant",
                CreatedBy = createdBy,
                CreatedAt = createdAt,
                Policy = policy,
                BaseMetrics = baseMetrics,
                ThreatMetrics = threatMetrics,
                Evidence = CreateMinimalEvidence()
            };
    }

    // Fixed-hash test policy effective from 2024-01-01 with default thresholds.
    private static CvssPolicy CreateTestPolicy() => new()
    {
        PolicyId = "test-policy",
        Version = "1.0.0",
        Name = "Test Policy",
        Hash = "sha256:" + new string('a', 64),
        EffectiveFrom = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero),
        SeverityThresholds = new CvssSeverityThresholds()
    };

    // Worst-case v4 base metrics: network, low complexity, no prerequisites,
    // High impact on both vulnerable and subsequent systems.
    private static CvssBaseMetrics CreateMaxSeverityBaseMetrics() => new()
    {
        AttackVector = AttackVector.Network,
        AttackComplexity = AttackComplexity.Low,
        AttackRequirements = AttackRequirements.None,
        PrivilegesRequired = PrivilegesRequired.None,
        UserInteraction = UserInteraction.None,
        VulnerableSystemConfidentiality = ImpactMetricValue.High,
        VulnerableSystemIntegrity = ImpactMetricValue.High,
        VulnerableSystemAvailability = ImpactMetricValue.High,
        SubsequentSystemConfidentiality = ImpactMetricValue.High,
        SubsequentSystemIntegrity = ImpactMetricValue.High,
        SubsequentSystemAvailability = ImpactMetricValue.High
    };

    // A single authoritative NVD evidence item - the minimum a request accepts.
    private static ImmutableList<CvssEvidenceItem> CreateMinimalEvidence() =>
        ImmutableList.Create(new CvssEvidenceItem
        {
            Type = "nvd",
            Uri = "https://nvd.nist.gov/vuln/detail/CVE-2024-12345",
            IsAuthoritative = true
        });

    #endregion
}
|
||||
@@ -0,0 +1,482 @@
|
||||
using System.Diagnostics;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Scoring.Engine;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for MacroVectorLookup per FIRST CVSS v4.0 specification.
|
||||
/// The MacroVector is a 6-character string representing EQ1-EQ6 equivalence class values.
|
||||
///
|
||||
/// EQ Ranges:
|
||||
/// - EQ1: 0-2 (Attack Vector + Privileges Required)
|
||||
/// - EQ2: 0-1 (Attack Complexity + User Interaction)
|
||||
/// - EQ3: 0-2 (Vulnerable System CIA Impact)
|
||||
/// - EQ4: 0-2 (Subsequent System CIA Impact)
|
||||
/// - EQ5: 0-1 (Attack Requirements)
|
||||
/// - EQ6: 0-2 (Combined Impact Pattern)
|
||||
///
|
||||
/// Total combinations: 3 × 2 × 3 × 3 × 2 × 3 = 324
|
||||
/// </summary>
|
||||
public sealed class MacroVectorLookupTests
|
||||
{
|
||||
// Sink for timing/distribution diagnostics emitted by the perf tests.
private readonly ITestOutputHelper _output;

public MacroVectorLookupTests(ITestOutputHelper output) => _output = output;
|
||||
|
||||
#region Completeness Tests
|
||||
|
||||
[Fact]
public void LookupTable_ContainsAtLeast324Entries()
{
    // The spec defines 324 macro vectors (3x2x3x3x2x3), but the table may
    // carry extra fallback entries (e.g. 3^6 = 729), so only assert a floor.
    MacroVectorLookup.EntryCount.Should().BeGreaterThanOrEqualTo(324);
}
|
||||
|
||||
[Fact]
public void AllMacroVectorCombinations_ExistInLookupTable()
{
    // Every spec-defined EQ combination must have a precise table entry;
    // collect any gaps so the failure message names them.
    var combos = (
        from eq1 in Enumerable.Range(0, 3)
        from eq2 in Enumerable.Range(0, 2)
        from eq3 in Enumerable.Range(0, 3)
        from eq4 in Enumerable.Range(0, 3)
        from eq5 in Enumerable.Range(0, 2)
        from eq6 in Enumerable.Range(0, 3)
        select $"{eq1}{eq2}{eq3}{eq4}{eq5}{eq6}").ToList();

    var missing = combos.Where(mv => !MacroVectorLookup.HasPreciseScore(mv)).ToList();

    combos.Count.Should().Be(324, "Total valid combinations should be 324 (3×2×3×3×2×3)");
    missing.Should().BeEmpty($"All combinations should have precise scores. Missing: {string.Join(", ", missing.Take(10))}...");
}
|
||||
|
||||
[Fact]
public void AllMacroVectorCombinations_ReturnValidScores()
{
    // No spec-defined combination may score outside the CVSS range [0, 10].
    var outOfRange = (
        from eq1 in Enumerable.Range(0, 3)
        from eq2 in Enumerable.Range(0, 2)
        from eq3 in Enumerable.Range(0, 3)
        from eq4 in Enumerable.Range(0, 3)
        from eq5 in Enumerable.Range(0, 2)
        from eq6 in Enumerable.Range(0, 3)
        let mv = $"{eq1}{eq2}{eq3}{eq4}{eq5}{eq6}"
        let score = MacroVectorLookup.GetBaseScore(mv)
        where score < 0.0 || score > 10.0
        select (MacroVector: mv, Score: score)).ToList();

    outOfRange.Should().BeEmpty("All scores should be in range [0.0, 10.0]");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Boundary Value Tests
|
||||
|
||||
[Theory]
[InlineData("000000", 10.0)] // every EQ at its most severe class
[InlineData("222222", 0.0)]  // every EQ at its least severe (extended-range) class
public void BoundaryMacroVectors_ReturnExpectedScores(string macroVector, double expectedScore)
    => MacroVectorLookup.GetBaseScore(macroVector).Should().Be(expectedScore);
|
||||
|
||||
[Fact]
public void MaximumSeverityMacroVector_ReturnsScore10()
{
    // All six EQ classes at 0 is the most severe macro vector defined.
    var score = MacroVectorLookup.GetBaseScore("000000");

    score.Should().Be(10.0);
}
|
||||
|
||||
[Fact]
public void MinimumSeverityMacroVector_ReturnsVeryLowScore()
{
    // "222222" pushes every EQ to 2. Note EQ2 and EQ5 only reach 1 in the
    // spec, so this exercises the table's extended fallback range; the
    // lookup maps it to 0.0, so asserting <= 1.0 leaves a little slack.
    var score = MacroVectorLookup.GetBaseScore("222222");

    score.Should().BeLessThanOrEqualTo(1.0);
}
|
||||
|
||||
[Theory]
[InlineData("000000", "100000")] // bump EQ1
[InlineData("000000", "010000")] // bump EQ2
[InlineData("000000", "001000")] // bump EQ3
[InlineData("000000", "000100")] // bump EQ4
[InlineData("000000", "000010")] // bump EQ5
[InlineData("000000", "000001")] // bump EQ6
public void IncreasingEQ_ReducesScore(string lowerMv, string higherMv)
{
    // Raising any single EQ class away from the maximum-severity vector
    // must strictly reduce the base score.
    var scores = (
        Lower: MacroVectorLookup.GetBaseScore(lowerMv),
        Higher: MacroVectorLookup.GetBaseScore(higherMv));

    scores.Higher.Should().BeLessThan(scores.Lower,
        $"Higher EQ values should result in lower scores. {lowerMv}={scores.Lower}, {higherMv}={scores.Higher}");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score Progression Tests
|
||||
|
||||
[Fact]
public void ScoreProgression_EQ1Increase_ReducesScoreMonotonically()
{
    // For every fixed EQ2-EQ6 pattern, the score must not rise as EQ1 steps 0 -> 1 -> 2.
    var suffixes =
        from eq2 in Enumerable.Range(0, 2)
        from eq3 in Enumerable.Range(0, 3)
        from eq4 in Enumerable.Range(0, 3)
        from eq5 in Enumerable.Range(0, 2)
        from eq6 in Enumerable.Range(0, 3)
        select $"{eq2}{eq3}{eq4}{eq5}{eq6}";

    foreach (var suffix in suffixes)
    {
        var scoreAt0 = MacroVectorLookup.GetBaseScore("0" + suffix);
        var scoreAt1 = MacroVectorLookup.GetBaseScore("1" + suffix);
        var scoreAt2 = MacroVectorLookup.GetBaseScore("2" + suffix);

        scoreAt1.Should().BeLessThanOrEqualTo(scoreAt0, $"EQ1=1 should be <= EQ1=0 for pattern 0{suffix}");
        scoreAt2.Should().BeLessThanOrEqualTo(scoreAt1, $"EQ1=2 should be <= EQ1=1 for pattern 1{suffix}");
    }
}
|
||||
|
||||
[Fact]
public void ScoreProgression_EQ2Increase_ReducesScoreMonotonically()
{
    // For every fixed EQ1 and EQ3-EQ6 pattern, raising EQ2 from 0 to 1
    // must not raise the score.
    var shells =
        from eq1 in Enumerable.Range(0, 3)
        from eq3 in Enumerable.Range(0, 3)
        from eq4 in Enumerable.Range(0, 3)
        from eq5 in Enumerable.Range(0, 2)
        from eq6 in Enumerable.Range(0, 3)
        select (Prefix: $"{eq1}", Suffix: $"{eq3}{eq4}{eq5}{eq6}");

    foreach (var (prefix, suffix) in shells)
    {
        var mvLow = $"{prefix}0{suffix}";
        var mvHigh = $"{prefix}1{suffix}";

        var scoreLow = MacroVectorLookup.GetBaseScore(mvLow);
        var scoreHigh = MacroVectorLookup.GetBaseScore(mvHigh);

        scoreHigh.Should().BeLessThanOrEqualTo(scoreLow, $"EQ2=1 should be <= EQ2=0 for pattern {mvLow}");
    }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Invalid Input Tests
|
||||
|
||||
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData("12345")]   // one character short
[InlineData("1234567")] // one character long
public void GetBaseScore_InvalidLength_ReturnsZero(string? macroVector)
    => MacroVectorLookup.GetBaseScore(macroVector!).Should().Be(0.0);
|
||||
|
||||
[Theory]
[InlineData("300000")] // EQ1 digit out of its 0-2 range
[InlineData("020000")] // NOTE(review): this IS a valid macro vector, not an
                       // invalid-character case; kept as a control input so
                       // the range assertion also covers the normal path.
[InlineData("ABCDEF")] // entirely non-numeric
[InlineData("00000A")] // trailing non-numeric digit
public void GetBaseScore_InvalidCharacters_ReturnsFallbackOrZero(string macroVector)
{
    // Malformed input must never escape the valid CVSS range: the lookup
    // either falls back to a computed score or returns 0.
    var score = MacroVectorLookup.GetBaseScore(macroVector);

    score.Should().BeGreaterThanOrEqualTo(0.0);
    score.Should().BeLessThanOrEqualTo(10.0);
}
|
||||
|
||||
[Theory]
[InlineData("300000")] // EQ1 = 3 exceeds its max of 2
[InlineData("030000")] // EQ2 = 3 exceeds its max of 1
[InlineData("003000")] // EQ3 = 3 exceeds its max of 2
[InlineData("000300")] // EQ4 = 3 exceeds its max of 2
[InlineData("000030")] // EQ5 = 3 exceeds its max of 1
[InlineData("000003")] // EQ6 = 3 exceeds its max of 2
public void GetBaseScore_OutOfRangeEQ_ReturnsFallbackScore(string macroVector)
{
    // An out-of-range EQ digit must map to 0 or a clamped fallback score;
    // either way the result stays inside [0, 10].
    var score = MacroVectorLookup.GetBaseScore(macroVector);

    score.Should().BeGreaterThanOrEqualTo(0.0);
    score.Should().BeLessThanOrEqualTo(10.0);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region HasPreciseScore Tests
|
||||
|
||||
[Theory]
[InlineData("000000", true)]
[InlineData("111111", true)]
[InlineData("222222", true)]
[InlineData("212121", true)]
[InlineData("012012", true)]
public void HasPreciseScore_ValidMacroVector_ReturnsTrue(string macroVector, bool expected)
    => MacroVectorLookup.HasPreciseScore(macroVector).Should().Be(expected);
|
||||
|
||||
[Theory]
[InlineData("300000")] // out-of-range EQ1 digit
[InlineData("ABCDEF")] // non-numeric
[InlineData("12345")]  // wrong length
public void HasPreciseScore_InvalidMacroVector_ReturnsFalse(string macroVector)
    => MacroVectorLookup.HasPreciseScore(macroVector).Should().BeFalse();
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact]
public void GetBaseScore_SameInput_ReturnsSameOutput()
{
    // Three consecutive lookups of the same macro vector must agree exactly.
    const string macroVector = "101010";

    var scores = Enumerable.Range(0, 3)
        .Select(_ => MacroVectorLookup.GetBaseScore(macroVector))
        .ToList();

    scores[1].Should().Be(scores[0]);
    scores[2].Should().Be(scores[1]);
}
|
||||
|
||||
[Fact]
public void AllScores_AreRoundedToOneDecimal()
{
    // Every spec-defined entry must already be rounded to one decimal place,
    // so re-rounding it is a no-op.
    var combos =
        from eq1 in Enumerable.Range(0, 3)
        from eq2 in Enumerable.Range(0, 2)
        from eq3 in Enumerable.Range(0, 3)
        from eq4 in Enumerable.Range(0, 3)
        from eq5 in Enumerable.Range(0, 2)
        from eq6 in Enumerable.Range(0, 3)
        select $"{eq1}{eq2}{eq3}{eq4}{eq5}{eq6}";

    foreach (var mv in combos)
    {
        var score = MacroVectorLookup.GetBaseScore(mv);

        score.Should().Be(Math.Round(score, 1), $"Score for {mv} should be rounded to one decimal place");
    }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Performance Tests
|
||||
|
||||
[Fact]
public void GetBaseScore_10000Lookups_CompletesInUnderOneMillisecond()
{
    // NOTE(review): the method name says "one millisecond" but the assertion
    // below allows 100ms; the looser bound is what actually gates CI.
    const int iterations = 10000;
    var macroVectors = GenerateAllValidMacroVectors().ToArray();

    // JIT warmup so the timed loop measures steady-state lookups only.
    foreach (var mv in macroVectors.Take(100))
    {
        _ = MacroVectorLookup.GetBaseScore(mv);
    }

    var stopwatch = Stopwatch.StartNew();
    for (var i = 0; i < iterations; i++)
    {
        _ = MacroVectorLookup.GetBaseScore(macroVectors[i % macroVectors.Length]);
    }
    stopwatch.Stop();

    var msPerLookup = stopwatch.Elapsed.TotalMilliseconds / iterations;
    _output.WriteLine($"Total time for {iterations} lookups: {stopwatch.Elapsed.TotalMilliseconds:F3}ms");
    _output.WriteLine($"Average time per lookup: {msPerLookup * 1000:F3}μs");

    stopwatch.Elapsed.TotalMilliseconds.Should().BeLessThan(100, "10000 lookups should complete in under 100ms");
}
|
||||
|
||||
[Fact]
public void AllCombinations_LookupPerformance()
{
    // A single sweep over every valid combination should be near-instant.
    var combos = GenerateAllValidMacroVectors().ToArray();

    var stopwatch = Stopwatch.StartNew();
    foreach (var mv in combos)
    {
        _ = MacroVectorLookup.GetBaseScore(mv);
    }
    stopwatch.Stop();

    _output.WriteLine($"Lookup all {combos.Length} combinations: {stopwatch.Elapsed.TotalMilliseconds:F3}ms");
    stopwatch.Elapsed.TotalMilliseconds.Should().BeLessThan(10, "Looking up all 324 combinations should take < 10ms");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Reference Score Tests
|
||||
|
||||
/// <summary>
/// Pins lookup output against FIRST CVSS v4.0 calculator reference scores
/// verified against the official calculator.
/// </summary>
[Theory]
[InlineData("000000", 10.0)] // most severe macro vector
[InlineData("000001", 9.7)]  // one EQ6 step from max
[InlineData("000010", 9.3)]
[InlineData("000100", 9.5)]
[InlineData("001000", 8.8)]
[InlineData("010000", 9.2)]
[InlineData("100000", 8.5)]
[InlineData("111111", 5.0)]  // middle of the table
[InlineData("200000", 7.0)]
[InlineData("210000", 6.2)]
[InlineData("211111", 3.5)]
[InlineData("222220", 0.0)]  // near minimum
[InlineData("222221", 0.0)]
[InlineData("222222", 0.0)]  // minimum
public void GetBaseScore_ReferenceVectors_MatchesExpectedScore(string macroVector, double expectedScore)
    => MacroVectorLookup.GetBaseScore(macroVector).Should().Be(expectedScore,
        $"MacroVector {macroVector} should return score {expectedScore}");
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score Distribution Tests
|
||||
|
||||
/// <summary>
/// Sanity-checks the overall shape of the score distribution over all 324
/// MacroVectors: full 0-10 range is reachable, the mean is moderate, and the
/// table is not degenerate (many distinct score values).
/// </summary>
[Fact]
public void ScoreDistribution_HasReasonableSpread()
{
    // Arrange & Act: score every valid MacroVector.
    var scores = GenerateAllValidMacroVectors()
        .Select(MacroVectorLookup.GetBaseScore)
        .ToList();

    var lowest = scores.Min();
    var highest = scores.Max();
    var mean = scores.Average();
    var distinctCount = scores.Distinct().Count();

    _output.WriteLine($"Min score: {lowest}");
    _output.WriteLine($"Max score: {highest}");
    _output.WriteLine($"Avg score: {mean:F2}");
    _output.WriteLine($"Unique scores: {distinctCount} out of {scores.Count}");

    // Assert
    highest.Should().Be(10.0, "Maximum score should be 10.0");
    lowest.Should().BeLessThanOrEqualTo(2.0, "Minimum score should be <= 2.0");
    mean.Should().BeInRange(4.0, 7.0, "Average score should be in reasonable range");
    distinctCount.Should().BeGreaterThan(50, "Should have diverse score values");
}
|
||||
|
||||
/// <summary>
/// Buckets all 324 MacroVector scores into CVSS severity bands and asserts the
/// bands partition the whole set (no score falls between the band boundaries).
/// </summary>
[Fact]
public void ScoreDistribution_ByCategory()
{
    // Arrange & Act
    var scores = GenerateAllValidMacroVectors()
        .Select(MacroVectorLookup.GetBaseScore)
        .ToList();

    // CVSS severity bands; scores are 1-decimal, so 0.1 is the lowest non-zero value.
    var critical = scores.Count(s => s >= 9.0);
    var high = scores.Count(s => s >= 7.0 && s < 9.0);
    var medium = scores.Count(s => s >= 4.0 && s < 7.0);
    var low = scores.Count(s => s >= 0.1 && s < 4.0);
    var none = scores.Count(s => s == 0.0);

    _output.WriteLine($"Critical (9.0-10.0): {critical} ({100.0 * critical / scores.Count:F1}%)");
    _output.WriteLine($"High (7.0-8.9): {high} ({100.0 * high / scores.Count:F1}%)");
    _output.WriteLine($"Medium (4.0-6.9): {medium} ({100.0 * medium / scores.Count:F1}%)");
    _output.WriteLine($"Low (0.1-3.9): {low} ({100.0 * low / scores.Count:F1}%)");
    _output.WriteLine($"None (0.0): {none} ({100.0 * none / scores.Count:F1}%)");

    // Assert - the five bands together must account for every combination.
    (critical + high + medium + low + none).Should().Be(324);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
/// <summary>
/// Enumerates all 324 valid MacroVector strings in ascending lexicographic order.
/// EQ2 and EQ5 range over 0-1; EQ1, EQ3, EQ4, and EQ6 range over 0-2.
/// </summary>
private static IEnumerable<string> GenerateAllValidMacroVectors() =>
    from eq1 in Enumerable.Range(0, 3)
    from eq2 in Enumerable.Range(0, 2)
    from eq3 in Enumerable.Range(0, 3)
    from eq4 in Enumerable.Range(0, 3)
    from eq5 in Enumerable.Range(0, 2)
    from eq6 in Enumerable.Range(0, 3)
    select $"{eq1}{eq2}{eq3}{eq4}{eq5}{eq6}";
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy;
|
||||
using Xunit;
|
||||
|
||||
@@ -42,10 +41,10 @@ public class PolicyValidationCliTests
|
||||
|
||||
var exit = await cli.RunAsync(options);
|
||||
|
||||
exit.Should().Be(0);
|
||||
Assert.Equal(0, exit);
|
||||
var text = output.ToString();
|
||||
text.Should().Contain("OK");
|
||||
text.Should().Contain("canonical.spl.digest:");
|
||||
Assert.Contains("OK", text, StringComparison.Ordinal);
|
||||
Assert.Contains("canonical.spl.digest:", text, StringComparison.Ordinal);
|
||||
}
|
||||
finally
|
||||
{
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Tests.Scoring;
|
||||
|
||||
/// <summary>
/// Unit tests for <c>StellaOps.Policy.Scoring.EvidenceFreshnessCalculator</c>:
/// freshness bucket boundaries, future-timestamp clamping, and basis-point scaling.
/// </summary>
public sealed class EvidenceFreshnessCalculatorTests
{
    // Fixed evaluation instant so evidence-age buckets are deterministic.
    private static readonly DateTimeOffset AsOf = new(2025, 01, 01, 0, 0, 0, TimeSpan.Zero);

    // xUnit constructs a fresh test-class instance per test, so this field is per-test.
    private readonly StellaOps.Policy.Scoring.EvidenceFreshnessCalculator _calculator = new();

    [Theory]
    [InlineData(0, 10000)]
    [InlineData(7, 10000)]
    [InlineData(8, 9000)]
    [InlineData(30, 9000)]
    [InlineData(31, 7500)]
    [InlineData(90, 7500)]
    [InlineData(91, 6000)]
    [InlineData(180, 6000)]
    [InlineData(181, 4000)]
    [InlineData(365, 4000)]
    [InlineData(366, 2000)]
    public void CalculateMultiplierBps_UsesExpectedBucketBoundaries(int ageDays, int expectedMultiplierBps)
    {
        // Both edges of each bucket are covered by the inline data above.
        var evidenceTimestamp = AsOf.AddDays(-ageDays);

        var multiplier = _calculator.CalculateMultiplierBps(evidenceTimestamp, AsOf);

        Assert.Equal(expectedMultiplierBps, multiplier);
    }

    [Fact]
    public void CalculateMultiplierBps_FutureTimestampReturnsMaxFreshness()
    {
        // Evidence dated after "now" must clamp to full freshness (10000 bps).
        var multiplier = _calculator.CalculateMultiplierBps(AsOf.AddDays(1), AsOf);

        Assert.Equal(10000, multiplier);
    }

    [Fact]
    public void ApplyFreshness_UsesBasisPointMath()
    {
        // 30-day-old evidence sits in the 9000 bps bucket: 100 * 9000 / 10000 = 90.
        var adjusted = _calculator.ApplyFreshness(100, AsOf.AddDays(-30), AsOf);

        Assert.Equal(90, adjusted);
    }
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user