feat: Add PathViewer and RiskDriftCard components with templates and styles
- Implemented PathViewerComponent for visualizing reachability call paths.
- Added RiskDriftCardComponent to display reachability drift results.
- Created corresponding HTML templates and SCSS styles for both components.
- Introduced test fixtures for reachability analysis in JSON format.
- Enhanced user interaction with collapsible and expandable features in PathViewer.
- Included risk trend visualization and summary metrics in RiskDriftCard.
This commit is contained in:
263
docs/cli/drift-cli.md
Normal file
263
docs/cli/drift-cli.md
Normal file
@@ -0,0 +1,263 @@
|
||||
# Drift CLI Reference
|
||||
|
||||
**Sprint:** SPRINT_3600_0004_0001
|
||||
**Task:** UI-024 - Update CLI documentation for drift commands
|
||||
|
||||
## Overview
|
||||
|
||||
The Drift CLI provides commands for detecting and analyzing reachability drift between scan results. Reachability drift occurs when the call paths to vulnerable code change between builds, potentially altering the risk profile of an application.
|
||||
|
||||
## Commands
|
||||
|
||||
### stellaops drift
|
||||
|
||||
Parent command for reachability drift operations.
|
||||
|
||||
```bash
|
||||
stellaops drift <SUBCOMMAND> [OPTIONS]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### stellaops drift compare
|
||||
|
||||
Compare reachability between two scans or graph snapshots.
|
||||
|
||||
```bash
|
||||
stellaops drift compare [OPTIONS]
|
||||
```
|
||||
|
||||
#### Required Options
|
||||
|
||||
| Option | Alias | Description |
|
||||
|--------|-------|-------------|
|
||||
| `--base <ID>` | `-b` | Base scan/graph ID or commit SHA for comparison |
|
||||
|
||||
#### Optional Options
|
||||
|
||||
| Option | Alias | Description | Default |
|
||||
|--------|-------|-------------|---------|
|
||||
| `--head <ID>` | `-h` | Head scan/graph ID or commit SHA | latest |
|
||||
| `--image <REF>` | `-i` | Container image reference (digest or tag) | - |
|
||||
| `--repo <REPO>` | `-r` | Repository reference (owner/repo) | - |
|
||||
| `--output <FMT>` | `-o` | Output format: `table`, `json`, `sarif` | `table` |
|
||||
| `--min-severity <SEV>` | | Minimum severity: `critical`, `high`, `medium`, `low`, `info` | `medium` |
|
||||
| `--only-increases` | | Only show sinks with increased reachability | `false` |
|
||||
| `--verbose` | | Enable verbose output | `false` |
|
||||
|
||||
#### Examples
|
||||
|
||||
##### Compare by scan IDs
|
||||
|
||||
```bash
|
||||
stellaops drift compare --base abc123 --head def456
|
||||
```
|
||||
|
||||
##### Compare by commit SHAs
|
||||
|
||||
```bash
|
||||
stellaops drift compare --base HEAD~1 --head HEAD --repo myorg/myapp
|
||||
```
|
||||
|
||||
##### Filter to risk increases only
|
||||
|
||||
```bash
|
||||
stellaops drift compare --base abc123 --only-increases --min-severity high
|
||||
```
|
||||
|
||||
##### Output as JSON
|
||||
|
||||
```bash
|
||||
stellaops drift compare --base abc123 --output json > drift.json
|
||||
```
|
||||
|
||||
##### Output as SARIF for CI integration
|
||||
|
||||
```bash
|
||||
stellaops drift compare --base abc123 --output sarif > drift.sarif
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### stellaops drift show
|
||||
|
||||
Display details of a previously computed drift result.
|
||||
|
||||
```bash
|
||||
stellaops drift show [OPTIONS]
|
||||
```
|
||||
|
||||
#### Required Options
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--id <ID>` | Drift result ID to display |
|
||||
|
||||
#### Optional Options
|
||||
|
||||
| Option | Alias | Description | Default |
|
||||
|--------|-------|-------------|---------|
|
||||
| `--output <FMT>` | `-o` | Output format: `table`, `json`, `sarif` | `table` |
|
||||
| `--expand-paths` | | Show full call paths instead of compressed view | `false` |
|
||||
| `--verbose` | | Enable verbose output | `false` |
|
||||
|
||||
#### Examples
|
||||
|
||||
##### Show drift result
|
||||
|
||||
```bash
|
||||
stellaops drift show --id drift-abc123
|
||||
```
|
||||
|
||||
##### Show with expanded paths
|
||||
|
||||
```bash
|
||||
stellaops drift show --id drift-abc123 --expand-paths
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Output Formats
|
||||
|
||||
### Table Format (Default)
|
||||
|
||||
Human-readable table output using Spectre.Console:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ Reachability Drift (abc123) │
|
||||
├───────────────────────────────┬─────────────────────────────┤
|
||||
│ Metric │ Value │
|
||||
├───────────────────────────────┼─────────────────────────────┤
|
||||
│ Trend │ ↑ Increasing │
|
||||
│ Net Risk Delta │ +3 │
|
||||
│ Increased │ 4 │
|
||||
│ Decreased │ 1 │
|
||||
│ New Sinks │ 2 │
|
||||
│ Removed Sinks │ 0 │
|
||||
└───────────────────────────────┴─────────────────────────────┘
|
||||
|
||||
┌──────────────┬──────────────────────┬───────────────┬─────────────────────────┬───────┐
|
||||
│ Severity │ Sink │ CVE │ Bucket Change │ Delta │
|
||||
├──────────────┼──────────────────────┼───────────────┼─────────────────────────┼───────┤
|
||||
│ CRITICAL │ SqlConnection.Open │ CVE-2024-1234 │ Runtime → Entrypoint │ +2 │
|
||||
│ HIGH │ XmlParser.Parse │ CVE-2024-5678 │ Unknown → Direct │ +1 │
|
||||
└──────────────┴──────────────────────┴───────────────┴─────────────────────────┴───────┘
|
||||
```
|
||||
|
||||
### JSON Format
|
||||
|
||||
Structured JSON for programmatic processing:
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "abc123",
|
||||
"comparedAt": "2025-12-18T10:30:00Z",
|
||||
"baseGraphId": "base-graph-id",
|
||||
"headGraphId": "head-graph-id",
|
||||
"summary": {
|
||||
"totalSinks": 42,
|
||||
"increasedReachability": 4,
|
||||
"decreasedReachability": 1,
|
||||
"unchangedReachability": 35,
|
||||
"newSinks": 2,
|
||||
"removedSinks": 0,
|
||||
"riskTrend": "increasing",
|
||||
"netRiskDelta": 3
|
||||
},
|
||||
"driftedSinks": [
|
||||
{
|
||||
"sinkSymbol": "SqlConnection.Open",
|
||||
"cveId": "CVE-2024-1234",
|
||||
"severity": "critical",
|
||||
"previousBucket": "runtime",
|
||||
"currentBucket": "entrypoint",
|
||||
"isRiskIncrease": true,
|
||||
"riskDelta": 2
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### SARIF Format
|
||||
|
||||
SARIF 2.1.0 output for CI/CD integration:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "2.1.0",
|
||||
"$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
|
||||
"runs": [
|
||||
{
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "StellaOps Drift",
|
||||
"version": "1.0.0",
|
||||
"informationUri": "https://stellaops.io/docs/drift"
|
||||
}
|
||||
},
|
||||
"results": [
|
||||
{
|
||||
"ruleId": "CVE-2024-1234",
|
||||
"level": "error",
|
||||
"message": {
|
||||
"text": "Reachability changed: runtime → entrypoint"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Exit Codes
|
||||
|
||||
| Code | Description |
|
||||
|------|-------------|
|
||||
| `0` | Success (no risk increases detected, or all increases are within the configured threshold) |
|
||||
| `1` | Error during execution |
|
||||
| `2` | Risk increases detected |
|
||||
| `3` | Critical risk increases detected |
|
||||
|
||||
---
|
||||
|
||||
## CI/CD Integration
|
||||
|
||||
### GitHub Actions
|
||||
|
||||
```yaml
|
||||
- name: Check Reachability Drift
|
||||
run: |
|
||||
stellaops drift compare \
|
||||
--base ${{ github.event.pull_request.base.sha }} \
|
||||
--head ${{ github.sha }} \
|
||||
--repo ${{ github.repository }} \
|
||||
--output sarif > drift.sarif
|
||||
continue-on-error: true
|
||||
|
||||
- name: Upload SARIF
|
||||
uses: github/codeql-action/upload-sarif@v2
|
||||
with:
|
||||
sarif_file: drift.sarif
|
||||
```
|
||||
|
||||
### GitLab CI
|
||||
|
||||
```yaml
|
||||
drift-check:
|
||||
script:
|
||||
- stellaops drift compare --base $CI_MERGE_REQUEST_DIFF_BASE_SHA --head $CI_COMMIT_SHA --output sarif > drift.sarif
|
||||
artifacts:
|
||||
reports:
|
||||
sast: drift.sarif
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [Reachability Analysis](../reachability/README.md)
|
||||
- [Smart-Diff CLI](./smart-diff-cli.md)
|
||||
- [VEX Decisioning](../vex/decisioning.md)
|
||||
256
docs/contracts/vuln-surface-v1.md
Normal file
256
docs/contracts/vuln-surface-v1.md
Normal file
@@ -0,0 +1,256 @@
|
||||
# Vuln Surface Contract v1
|
||||
|
||||
**Sprint:** SPRINT_3700_0002_0001
|
||||
**Task:** SURF-024
|
||||
**Schema:** `stella.ops/vulnSurface@v1`
|
||||
|
||||
## Overview
|
||||
|
||||
A Vulnerability Surface represents the specific methods that changed between a vulnerable and fixed version of a package. This enables precise reachability analysis by identifying the exact "trigger" methods that are dangerous rather than treating the entire package as vulnerable.
|
||||
|
||||
## Use Cases
|
||||
|
||||
1. **Noise Reduction** - Only flag findings where code actually calls vulnerable methods
|
||||
2. **Confidence Tiers** - "Confirmed reachable" (calls trigger) vs "Potentially reachable" (uses package)
|
||||
3. **Remediation Guidance** - Show developers exactly which API calls to avoid
|
||||
4. **VEX Precision** - Automatically generate VEX "not_affected" for unreachable triggers
|
||||
|
||||
## Data Model
|
||||
|
||||
### VulnSurface
|
||||
|
||||
Root object representing a computed vulnerability surface.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|-------|------|----------|-------------|
|
||||
| `surface_id` | integer | Yes | Database ID |
|
||||
| `cve_id` | string | Yes | CVE identifier (e.g., "CVE-2024-12345") |
|
||||
| `package_id` | string | Yes | Package identifier in PURL format |
|
||||
| `ecosystem` | string | Yes | Package ecosystem: `nuget`, `npm`, `maven`, `pypi` |
|
||||
| `vuln_version` | string | Yes | Vulnerable version analyzed |
|
||||
| `fixed_version` | string | Yes | First fixed version used for diff |
|
||||
| `sinks` | VulnSurfaceSink[] | Yes | Changed methods (vulnerability triggers) |
|
||||
| `trigger_count` | integer | Yes | Number of callers to sink methods |
|
||||
| `status` | VulnSurfaceStatus | Yes | Computation status |
|
||||
| `confidence` | number | Yes | Confidence score (0.0-1.0) |
|
||||
| `computed_at` | string | Yes | ISO 8601 timestamp |
|
||||
|
||||
### VulnSurfaceSink
|
||||
|
||||
A method that changed between vulnerable and fixed versions.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|-------|------|----------|-------------|
|
||||
| `sink_id` | integer | Yes | Database ID |
|
||||
| `method_key` | string | Yes | Fully qualified method signature |
|
||||
| `method_name` | string | Yes | Simple method name |
|
||||
| `declaring_type` | string | Yes | Containing class/module |
|
||||
| `namespace` | string | No | Namespace/package |
|
||||
| `change_type` | MethodChangeType | Yes | How the method changed |
|
||||
| `is_public` | boolean | Yes | Whether method is publicly accessible |
|
||||
| `parameter_count` | integer | No | Number of parameters |
|
||||
| `return_type` | string | No | Return type |
|
||||
| `source_file` | string | No | Source file (from debug symbols) |
|
||||
| `start_line` | integer | No | Starting line number |
|
||||
| `end_line` | integer | No | Ending line number |
|
||||
|
||||
### VulnSurfaceTrigger
|
||||
|
||||
A call site that invokes a vulnerable sink method.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|-------|------|----------|-------------|
|
||||
| `trigger_id` | integer | Yes | Database ID |
|
||||
| `sink_id` | integer | Yes | Reference to sink |
|
||||
| `scan_id` | UUID | Yes | Scan where trigger was found |
|
||||
| `caller_node_id` | string | Yes | Call graph node ID |
|
||||
| `caller_method_key` | string | Yes | FQN of calling method |
|
||||
| `caller_file` | string | No | Source file of caller |
|
||||
| `caller_line` | integer | No | Line number of call |
|
||||
| `reachability_bucket` | string | Yes | Reachability classification |
|
||||
| `path_length` | integer | No | Shortest path from entrypoint |
|
||||
| `confidence` | number | Yes | Confidence score (0.0-1.0) |
|
||||
| `call_type` | string | Yes | Call type: `direct`, `virtual`, `interface`, `reflection` |
|
||||
| `is_conditional` | boolean | Yes | Whether call is behind a condition |
|
||||
|
||||
## Enums
|
||||
|
||||
### VulnSurfaceStatus
|
||||
|
||||
| Value | Description |
|
||||
|-------|-------------|
|
||||
| `pending` | Surface computation queued |
|
||||
| `computing` | Currently being computed |
|
||||
| `computed` | Successfully computed |
|
||||
| `failed` | Computation failed |
|
||||
| `stale` | Needs recomputation (new version available) |
|
||||
|
||||
### MethodChangeType
|
||||
|
||||
| Value | Description |
|
||||
|-------|-------------|
|
||||
| `added` | Method added in fix (not in vulnerable version) |
|
||||
| `removed` | Method removed in fix (was in vulnerable version) |
|
||||
| `modified` | Method body changed between versions |
|
||||
| `unknown` | Change type could not be determined |
|
||||
|
||||
### Reachability Buckets
|
||||
|
||||
| Bucket | Description | Risk Level |
|
||||
|--------|-------------|------------|
|
||||
| `entrypoint` | Sink is directly exposed as entrypoint | Critical |
|
||||
| `direct` | Reachable from entrypoint with no authentication gates | High |
|
||||
| `runtime` | Reachable but behind runtime conditions/auth | Medium |
|
||||
| `unknown` | Reachability could not be determined | Medium |
|
||||
| `unreachable` | No path from any entrypoint | Low |
|
||||
|
||||
## Fingerprinting Methods
|
||||
|
||||
### cecil-il (NuGet/.NET)
|
||||
|
||||
Uses Mono.Cecil to compute SHA-256 hash of IL instruction sequence:
|
||||
|
||||
```
|
||||
IL_0000: ldarg.0
|
||||
IL_0001: call System.Object::.ctor()
|
||||
IL_0006: ret
|
||||
```
|
||||
|
||||
Normalized to remove:
|
||||
- NOP instructions
|
||||
- Debug sequence points
|
||||
- Local variable indices (replaced with placeholders)
|
||||
|
||||
### babel-ast (npm/Node.js)
|
||||
|
||||
Uses Babel to parse JavaScript/TypeScript and compute hash of normalized AST:
|
||||
|
||||
```javascript
|
||||
function vulnerable(input) {
|
||||
eval(input); // dangerous!
|
||||
}
|
||||
```
|
||||
|
||||
Normalized to remove:
|
||||
- Comments
|
||||
- Whitespace
|
||||
- Variable names (renamed to positional)
|
||||
|
||||
### asm-bytecode (Maven/Java)
|
||||
|
||||
Uses ASM to compute hash of Java bytecode:
|
||||
|
||||
```
|
||||
ALOAD 0
|
||||
INVOKESPECIAL java/lang/Object.<init>()V
|
||||
RETURN
|
||||
```
|
||||
|
||||
Normalized to remove:
|
||||
- Line number tables
|
||||
- Local variable tables
|
||||
- Stack map frames
|
||||
|
||||
### python-ast (PyPI)
|
||||
|
||||
Uses Python's `ast` module to compute hash of normalized AST:
|
||||
|
||||
```python
|
||||
def vulnerable(user_input):
|
||||
exec(user_input) # dangerous!
|
||||
```
|
||||
|
||||
Normalized to remove:
|
||||
- Docstrings
|
||||
- Comments
|
||||
- Variable names
|
||||
|
||||
## Database Schema
|
||||
|
||||
```sql
|
||||
-- Surfaces table
|
||||
CREATE TABLE scanner.vuln_surfaces (
|
||||
id UUID PRIMARY KEY,
|
||||
tenant_id UUID NOT NULL,
|
||||
cve_id TEXT NOT NULL,
|
||||
package_ecosystem TEXT NOT NULL,
|
||||
package_name TEXT NOT NULL,
|
||||
vuln_version TEXT NOT NULL,
|
||||
fixed_version TEXT,
|
||||
fingerprint_method TEXT NOT NULL,
|
||||
total_methods_vuln INTEGER,
|
||||
total_methods_fixed INTEGER,
|
||||
changed_method_count INTEGER,
|
||||
computed_at TIMESTAMPTZ DEFAULT now(),
|
||||
UNIQUE (tenant_id, cve_id, package_ecosystem, package_name, vuln_version)
|
||||
);
|
||||
|
||||
-- Sinks table
|
||||
CREATE TABLE scanner.vuln_surface_sinks (
|
||||
id UUID PRIMARY KEY,
|
||||
surface_id UUID REFERENCES scanner.vuln_surfaces(id) ON DELETE CASCADE,
|
||||
method_key TEXT NOT NULL,
|
||||
method_name TEXT NOT NULL,
|
||||
declaring_type TEXT NOT NULL,
|
||||
change_type TEXT NOT NULL,
|
||||
UNIQUE (surface_id, method_key)
|
||||
);
|
||||
|
||||
-- Triggers table
|
||||
CREATE TABLE scanner.vuln_surface_triggers (
|
||||
id UUID PRIMARY KEY,
|
||||
sink_id UUID REFERENCES scanner.vuln_surface_sinks(id) ON DELETE CASCADE,
|
||||
scan_id UUID NOT NULL,
|
||||
caller_node_id TEXT NOT NULL,
|
||||
reachability_bucket TEXT NOT NULL,
|
||||
confidence REAL NOT NULL,
|
||||
UNIQUE (sink_id, scan_id, caller_node_id)
|
||||
);
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### POST /api/v1/surfaces/compute
|
||||
|
||||
Request surface computation for a CVE + package.
|
||||
|
||||
**Request:**
|
||||
```json
|
||||
{
|
||||
"cveId": "CVE-2024-12345",
|
||||
"ecosystem": "nuget",
|
||||
"packageName": "Newtonsoft.Json",
|
||||
"vulnVersion": "13.0.1",
|
||||
"fixedVersion": "13.0.2"
|
||||
}
|
||||
```
|
||||
|
||||
**Response:**
|
||||
```json
|
||||
{
|
||||
"surfaceId": "uuid",
|
||||
"status": "pending"
|
||||
}
|
||||
```
|
||||
|
||||
### GET /api/v1/surfaces/{surfaceId}
|
||||
|
||||
Get computed surface with sinks.
|
||||
|
||||
### GET /api/v1/surfaces/{surfaceId}/triggers?scanId={scanId}
|
||||
|
||||
Get triggers for a surface in a specific scan.
|
||||
|
||||
## Integration Points
|
||||
|
||||
1. **Concelier** - Feeds CVE + affected version ranges
|
||||
2. **Scanner** - Computes surfaces during SBOM analysis
|
||||
3. **Call Graph** - Provides reachability analysis
|
||||
4. **VEX Lens** - Uses surfaces for automated VEX decisions
|
||||
5. **UI** - Displays surface details and trigger paths
|
||||
|
||||
## References
|
||||
|
||||
- [Vuln Surfaces Sprint](../implplan/SPRINT_3700_0002_0001_vuln_surfaces_core.md)
|
||||
- [Reachability Architecture](../reachability/README.md)
|
||||
- [RichGraph Contract](./richgraph-v1.md)
|
||||
@@ -64,12 +64,40 @@ Before starting, read:
|
||||
| 4 | T4 | DONE | Expose verification settings | Attestor Guild | Add `RekorVerificationOptions` in Configuration/ |
|
||||
| 5 | T5 | DONE | Use verifiers in HTTP client | Attestor Guild | Implement `HttpRekorClient.VerifyInclusionAsync` |
|
||||
| 6 | T6 | DONE | Stub verification behavior | Attestor Guild | Implement `StubRekorClient.VerifyInclusionAsync` |
|
||||
| 7 | T7 | BLOCKED | Wire verification pipeline | Attestor Guild | Requires T8 for offline mode before full pipeline integration |
|
||||
| 8 | T8 | BLOCKED | Add sealed/offline checkpoint mode | Attestor Guild | Depends on finalized offline checkpoint bundle format contract |
|
||||
| 9 | T9 | DONE | Add unit coverage | Attestor Guild | Add unit tests for Merkle proof verification |
|
||||
| 10 | T10 | DONE | Add integration coverage | Attestor Guild | RekorInclusionVerificationIntegrationTests.cs added |
|
||||
| 11 | T11 | DONE | Expose verification counters | Attestor Guild | Added Rekor counters to AttestorMetrics |
|
||||
| 12 | T12 | DONE | Sync docs | Attestor Guild | Added Rekor verification section to architecture.md |
|
||||
| 7 | T6a | TODO | Freeze offline checkpoint/receipt contract | Attestor Guild · AirGap Guild | Publish canonical offline layout + schema for: tlog root key, checkpoint signature, and inclusion proof pack (docs + fixtures) |
|
||||
| 8 | T6b | TODO | Add offline fixtures + validation harness | Attestor Guild | Add deterministic fixtures + parsing helpers so offline mode can be tested without network |
|
||||
| 9 | T7 | BLOCKED | Wire verification pipeline | Attestor Guild | BLOCKED on T8 (and its prerequisites T6a/T6b) before full pipeline integration |
|
||||
| 10 | T8 | BLOCKED | Add sealed/offline checkpoint mode | Attestor Guild | BLOCKED on T6a/T6b (offline checkpoint/receipt contract + fixtures) |
|
||||
| 11 | T9 | DONE | Add unit coverage | Attestor Guild | Add unit tests for Merkle proof verification |
|
||||
| 12 | T10 | DONE | Add integration coverage | Attestor Guild | RekorInclusionVerificationIntegrationTests.cs added |
|
||||
| 13 | T11 | DONE | Expose verification counters | Attestor Guild | Added Rekor counters to AttestorMetrics |
|
||||
| 14 | T12 | DONE | Sync docs | Attestor Guild | Added Rekor verification section to architecture.md |
|
||||
|
||||
---
|
||||
|
||||
## Unblock Task Notes (T6a/T6b)
|
||||
|
||||
### T6a: Freeze offline checkpoint/receipt contract
|
||||
- **Goal:** define the canonical offline inputs required to verify inclusion proofs without network access.
|
||||
- **Use these docs as the baseline (do not invent new shapes):**
|
||||
- `docs/product-advisories/14-Dec-2025 - Rekor Integration Technical Reference.md` (§13)
|
||||
- `docs/product-advisories/14-Dec-2025 - Offline and Air-Gap Technical Reference.md` (§3–4; `evidence/tlog/checkpoint.sig` + `entries/`)
|
||||
- **Minimum deliverables:**
|
||||
- A single canonical contract doc (new or existing) that answers:
|
||||
- Where the **tlog public key** comes from (file path, rotation/versioning)
|
||||
- Where the **signed checkpoint/tree head** lives (file path; signature format)
|
||||
- Where the **inclusion proof pack** lives (file path; entry + hashes; deterministic ordering rules)
|
||||
- How the checkpoint is bound to the proof pack (tree size, root hash)
|
||||
- A schema file (JSON Schema) for the on-disk checkpoint/receipt shape used by Attestor offline verification.
|
||||
|
||||
### T6b: Offline fixtures + validation harness
|
||||
- **Goal:** make offline mode testable and reproducible.
|
||||
- **Minimum deliverables:**
|
||||
- Deterministic fixtures committed under `src/Attestor/StellaOps.Attestor.Tests/Fixtures/` (checkpoint, pubkey, valid/invalid proof material).
|
||||
- Tests that verify:
|
||||
- checkpoint signature verification succeeds/fails as expected
|
||||
- recomputed Merkle root matches checkpoint for valid entries and fails for tampered fixtures
|
||||
- no network calls are required for offline mode
|
||||
|
||||
---
|
||||
|
||||
@@ -285,6 +313,7 @@ public Counter<long> CheckpointVerifyTotal { get; } // attestor.checkpoint_
|
||||
## Interlocks
|
||||
- Rekor public key distribution must be configured via `AttestorOptions` and documented for offline bundles.
|
||||
- Offline checkpoints must be pre-distributed; `AllowOfflineWithoutSignature` policy requires explicit operator intent.
|
||||
- T6a/T6b define the concrete offline checkpoint/receipt contract and fixtures; do not implement T8 until those are published and reviewed.
|
||||
|
||||
---
|
||||
|
||||
@@ -320,6 +349,7 @@ public Counter<long> CheckpointVerifyTotal { get; } // attestor.checkpoint_
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-14 | Normalised sprint file to standard template sections; started implementation and moved `T1` to `DOING`. | Implementer |
|
||||
| 2025-12-18 | Added unblock tasks (T6a/T6b) for offline checkpoint/receipt contract + fixtures; updated T7/T8 to be BLOCKED on them. | Project Mgmt |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -160,11 +160,13 @@ External Dependencies:
|
||||
| **EPSS-3410-011** | Implement outbox event schema | DONE | Agent | 2h | `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/Events/EpssUpdatedEvent.cs` |
|
||||
| **EPSS-3410-012** | Unit tests (parser, detector, flags) | DONE | Agent | 6h | `EpssCsvStreamParserTests.cs`, `EpssChangeDetectorTests.cs` |
|
||||
| **EPSS-3410-013** | Integration tests (Testcontainers) | DONE | Agent | 8h | `EpssRepositoryIntegrationTests.cs` |
|
||||
| **EPSS-3410-014** | Performance test (300k rows) | BLOCKED | Backend | 4h | Requires CI infrastructure for benchmark runs with Testcontainers + 300k row dataset. Repository uses NpgsqlBinaryImporter for bulk insert; expected <120s based on similar workloads. |
|
||||
| **EPSS-3410-013A** | Perf harness + deterministic dataset generator | TODO | Backend | 4h | Add a perf test project and deterministic 310k-row CSV generator (fixed seed, no network). Produce local run instructions and baseline output format. |
|
||||
| **EPSS-3410-013B** | CI perf runner + workflow for EPSS ingest | TODO | DevOps | 4h | Add a Gitea workflow (nightly/manual) + runner requirements so perf tests can run with Docker/Testcontainers; publish runner label/capacity requirements and artifact retention. |
|
||||
| **EPSS-3410-014** | Performance test (300k rows) | BLOCKED | Backend | 4h | BLOCKED on EPSS-3410-013A/013B. Once harness + CI runner exist, execute and record baseline (<120s) with environment details. |
|
||||
| **EPSS-3410-015** | Observability (metrics, logs, traces) | DONE | Agent | 4h | ActivitySource with tags (model_date, row_count, cve_count, duration_ms); structured logging at Info/Warning/Error levels. |
|
||||
| **EPSS-3410-016** | Documentation (runbook, troubleshooting) | DONE | Agent | 3h | Added Operations Runbook (§10) to `docs/modules/scanner/epss-integration.md` with configuration, modes, manual ingestion, troubleshooting, and monitoring guidance. |
|
||||
|
||||
**Total Estimated Effort**: 65 hours (~2 weeks for 1 developer)
|
||||
**Total Estimated Effort**: 73 hours (~2 weeks for 1 developer)
|
||||
|
||||
---
|
||||
|
||||
@@ -604,11 +606,46 @@ public async Task ComputeChanges_DetectsFlags_Correctly()
|
||||
|
||||
---
|
||||
|
||||
### EPSS-3410-013A: Perf Harness + Deterministic Dataset Generator
|
||||
|
||||
**Description**: Add an offline-friendly perf harness for EPSS ingest without committing a huge static dataset.
|
||||
|
||||
**Deliverables**:
|
||||
- New test project: `src/Scanner/__Tests/StellaOps.Scanner.Storage.Performance.Tests/`
|
||||
- Deterministic generator: 310k rows with fixed seed, stable row order, and controlled CVE distribution.
|
||||
- Test tagged so it does not run in default CI (`[Trait("Category","Performance")]` or equivalent).
|
||||
- Local run snippet (exact `dotnet test` invocation + required env vars for Testcontainers).
|
||||
|
||||
**Acceptance Criteria**:
|
||||
- [ ] Generator produces identical output across runs (same seed ⇒ same SHA-256 of CSV bytes)
|
||||
- [ ] Perf test runs locally in <= 5 minutes on a dev machine (budget validation happens in CI)
|
||||
- [ ] No network required beyond local Docker engine for Testcontainers
|
||||
|
||||
---
|
||||
|
||||
### EPSS-3410-013B: CI Perf Runner + Workflow
|
||||
|
||||
**Description**: Enable deterministic perf execution in CI with known hardware + reproducible logs.
|
||||
|
||||
**Deliverables**:
|
||||
- Gitea workflow (nightly + manual): `.gitea/workflows/epss-perf.yml`
|
||||
- Runner requirements documented (label, OS/arch, CPU/RAM, Docker/Testcontainers support).
|
||||
- Artifacts retained: perf logs + environment metadata (CPU model, cores, memory, Docker version, image digests).
|
||||
|
||||
**Acceptance Criteria**:
|
||||
- [ ] CI job can spin up PostgreSQL via Testcontainers reliably
|
||||
- [ ] Perf test output includes total duration + phase breakdowns (parse/insert/changes/current)
|
||||
- [ ] Budgets enforced only in this workflow (does not break default PR CI)
|
||||
|
||||
---
|
||||
|
||||
### EPSS-3410-014: Performance Test (300k rows)
|
||||
|
||||
**Description**: Verify ingestion meets performance budget.
|
||||
|
||||
**File**: `src/Concelier/__Tests/StellaOps.Concelier.Epss.Performance.Tests/EpssIngestPerformanceTests.cs`
|
||||
**BLOCKED ON:** EPSS-3410-013A, EPSS-3410-013B
|
||||
|
||||
**File**: `src/Scanner/__Tests/StellaOps.Scanner.Storage.Performance.Tests/EpssIngestPerformanceTests.cs` (new project)
|
||||
|
||||
**Requirements**:
|
||||
- Synthetic CSV: 310,000 rows (close to real-world)
|
||||
@@ -865,11 +902,12 @@ concelier:
|
||||
| 2025-12-18 | Completed EPSS-3410-015: Verified ActivitySource tracing with model_date, row_count, cve_count, duration_ms tags; structured logging in place. | Agent |
|
||||
| 2025-12-18 | Completed EPSS-3410-016: Added Operations Runbook (§10) to docs/modules/scanner/epss-integration.md covering config, online/bundle modes, manual trigger, troubleshooting, monitoring. | Agent |
|
||||
| 2025-12-18 | BLOCKED EPSS-3410-014: Performance test requires CI infrastructure and 300k row dataset. BULK INSERT uses NpgsqlBinaryImporter; expected to meet <120s budget. | Agent |
|
||||
| 2025-12-18 | Added unblock tasks EPSS-3410-013A/013B; EPSS-3410-014 remains BLOCKED until harness + CI perf runner/workflow are available. | Project Mgmt |
|
||||
|
||||
## Next Checkpoints
|
||||
|
||||
- Unblock performance test (014) when CI infrastructure is available.
|
||||
- Unblock performance test (EPSS-3410-014) by completing EPSS-3410-013A (harness) and EPSS-3410-013B (CI perf runner/workflow).
|
||||
- Close Scanner integration (SPRINT_3410_0002_0001).
|
||||
|
||||
**Sprint Status**: BLOCKED (1 task pending CI infrastructure)
|
||||
**Sprint Status**: BLOCKED (EPSS-3410-014 pending EPSS-3410-013B CI perf runner/workflow)
|
||||
**Approval**: _____________________ Date: ___________
|
||||
|
||||
@@ -47,8 +47,8 @@ Integrate EPSS v4 data into the Scanner WebService for vulnerability scoring and
|
||||
| 2 | EPSS-SCAN-002 | DONE | Agent | 2h | Create `EpssEvidence` record type |
|
||||
| 3 | EPSS-SCAN-003 | DONE | Agent | 4h | Implement `IEpssProvider` interface |
|
||||
| 4 | EPSS-SCAN-004 | DONE | Agent | 4h | Implement `EpssProvider` with PostgreSQL lookup |
|
||||
| 5 | EPSS-SCAN-005 | TODO | Backend | 2h | Add optional Valkey cache layer |
|
||||
| 6 | EPSS-SCAN-006 | TODO | Backend | 4h | Integrate EPSS into `ScanProcessor` |
|
||||
| 5 | EPSS-SCAN-005 | DONE | Agent | 2h | Add optional Valkey cache layer |
|
||||
| 6 | EPSS-SCAN-006 | DONE | Agent | 4h | Integrate EPSS into `ScanProcessor` via EpssEnrichmentStageExecutor |
|
||||
| 7 | EPSS-SCAN-007 | DONE | — | 2h | Add EPSS weight to scoring configuration (EpssMultiplier in ScoreExplanationWeights) |
|
||||
| 8 | EPSS-SCAN-008 | DONE | Agent | 4h | Implement `GET /epss/current` bulk lookup API |
|
||||
| 9 | EPSS-SCAN-009 | DONE | Agent | 2h | Implement `GET /epss/history` time-series API |
|
||||
@@ -132,6 +132,7 @@ scoring:
|
||||
| 2025-12-17 | Sprint created from advisory processing | Agent |
|
||||
| 2025-12-17 | EPSS-SCAN-001: Created 008_epss_integration.sql in Scanner Storage | Agent |
|
||||
| 2025-12-17 | EPSS-SCAN-012: Created docs/modules/scanner/epss-integration.md | Agent |
|
||||
| 2025-12-18 | EPSS-SCAN-005: Implemented CachingEpssProvider with Valkey cache layer. Created EpssServiceCollectionExtensions for DI registration. | Agent |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -37,14 +37,14 @@ This sprint implements live EPSS enrichment for existing vulnerability instances
|
||||
|
||||
| # | Status | Task | Notes |
|
||||
|---|--------|------|-------|
|
||||
| 1 | TODO | Implement `EpssEnrichmentJob` service | Core enrichment logic |
|
||||
| 2 | TODO | Create `vuln_instance_triage` schema updates | Add `current_epss_*` columns |
|
||||
| 1 | DONE | Implement `EpssEnrichmentJob` service | Created EpssEnrichmentJob.cs with background processing |
|
||||
| 2 | DONE | Create `vuln_instance_triage` schema updates | Created 014_epss_triage_columns.sql with EPSS columns and batch_update_epss_triage() |
|
||||
| 3 | DONE | Implement `epss_changes` flag logic | `EpssChangeFlags` enum with NEW_SCORED, CROSSED_HIGH, BIG_JUMP, DROPPED_LOW |
|
||||
| 4 | TODO | Add efficient targeting filter | Only update instances with flags set |
|
||||
| 4 | DONE | Add efficient targeting filter | Added GetChangesAsync() to IEpssRepository; EpssEnrichmentJob uses flag filtering |
|
||||
| 5 | DONE | Implement priority band calculation | `EpssPriorityCalculator` maps percentile to CRITICAL/HIGH/MEDIUM/LOW |
|
||||
| 6 | TODO | Emit `vuln.priority.changed` event | Only when band changes |
|
||||
| 6 | DONE | Emit `vuln.priority.changed` event | Added IEpssSignalPublisher.PublishPriorityChangedAsync() in EpssEnrichmentJob |
|
||||
| 7 | DONE | Add configurable thresholds | `EpssEnrichmentOptions` with HighPercentile, HighScore, BigJumpDelta, etc. |
|
||||
| 8 | TODO | Implement bulk update optimization | Batch updates for performance |
|
||||
| 8 | DONE | Implement bulk update optimization | Added batch_update_epss_triage() PostgreSQL function |
|
||||
| 9 | DONE | Add `EpssEnrichmentOptions` configuration | Environment-specific settings in Scanner.Core.Configuration |
|
||||
| 10 | TODO | Create unit tests for enrichment logic | Flag detection, band calculation |
|
||||
| 11 | TODO | Create integration tests | End-to-end enrichment flow |
|
||||
@@ -58,10 +58,12 @@ This sprint implements live EPSS enrichment for existing vulnerability instances
|
||||
|
||||
| # | Status | Task | Notes |
|
||||
|---|--------|------|-------|
|
||||
| R1 | TODO | Create `epss_raw` table migration | `011_epss_raw_layer.sql` - Full JSONB payload storage |
|
||||
| R2 | TODO | Update `EpssIngestJob` to store raw payload | Decompress CSV, convert to JSONB array, store in `epss_raw` |
|
||||
| R3 | TODO | Add retention policy for raw data | `prune_epss_raw()` function - Keep 365 days |
|
||||
| R4 | TODO | Implement `ReplayFromRawAsync()` method | Re-normalize from stored raw without re-downloading |
|
||||
| R1 | DONE | Create `epss_raw` table migration | `011_epss_raw_layer.sql` - Full JSONB payload storage |
|
||||
| R2 | DONE | Update `EpssIngestJob` to store raw payload | Added StoreRawPayloadAsync(), converts to JSONB, stores in `epss_raw` |
|
||||
| R3 | DONE | Add retention policy for raw data | `prune_epss_raw()` function in migration - Keep 365 days |
|
||||
| R4 | DONE | Implement `ReplayFromRawAsync()` method | Created EpssReplayService with ReplayFromRawAsync() and ReplayRangeAsync() |
|
||||
| R5 | DONE | Implement `IEpssRawRepository` interface | Created with CRUD operations |
|
||||
| R6 | DONE | Implement `PostgresEpssRawRepository` | PostgreSQL implementation with DI registration |
|
||||
|
||||
### Signal-Ready Layer Tasks (S1-S12)
|
||||
|
||||
@@ -69,16 +71,16 @@ This sprint implements live EPSS enrichment for existing vulnerability instances
|
||||
|
||||
| # | Status | Task | Notes |
|
||||
|---|--------|------|-------|
|
||||
| S1 | TODO | Create `epss_signal` table migration | `012_epss_signal_layer.sql` - Tenant-scoped with dedupe_key |
|
||||
| S2 | TODO | Implement `IEpssSignalRepository` interface | Signal CRUD operations |
|
||||
| S3 | TODO | Implement `PostgresEpssSignalRepository` | PostgreSQL implementation |
|
||||
| S4 | TODO | Implement `ComputeExplainHash()` | Deterministic SHA-256 of signal inputs |
|
||||
| S5 | TODO | Create `EpssSignalJob` service | Runs after enrichment, per-tenant |
|
||||
| S6 | TODO | Add "observed CVEs" filter | Only signal for CVEs in tenant's inventory |
|
||||
| S7 | TODO | Implement model version change detection | Compare vs previous day's `model_version_tag` |
|
||||
| S8 | TODO | Add `MODEL_UPDATED` event type | Summary event instead of 300k individual deltas |
|
||||
| S9 | TODO | Connect to Notify/Router | Publish to `signals.epss` topic |
|
||||
| S10 | TODO | Add signal deduplication | Idempotent via `dedupe_key` constraint |
|
||||
| S1 | DONE | Create `epss_signal` table migration | `012_epss_signal_layer.sql` - Tenant-scoped with dedupe_key |
|
||||
| S2 | DONE | Implement `IEpssSignalRepository` interface | Signal CRUD operations with config support |
|
||||
| S3 | DONE | Implement `PostgresEpssSignalRepository` | PostgreSQL implementation with DI registration |
|
||||
| S4 | DONE | Implement `ComputeExplainHash()` | Created EpssExplainHashCalculator with deterministic SHA-256 |
|
||||
| S5 | DONE | Create `EpssSignalJob` service | Created EpssSignalJob.cs with batch processing and tenant support |
|
||||
| S6 | DONE | Add "observed CVEs" filter | Created IObservedCveRepository and PostgresObservedCveRepository; integrated in EpssSignalJob |
|
||||
| S7 | DONE | Implement model version change detection | Added in EpssSignalJob with _lastModelVersion tracking |
|
||||
| S8 | DONE | Add `MODEL_UPDATED` event type | EmitModelUpdatedSignalAsync() creates summary event |
|
||||
| S9 | DONE | Connect to Notify/Router | Created IEpssSignalPublisher interface; EpssSignalJob publishes via PublishBatchAsync() |
|
||||
| S10 | DONE | Add signal deduplication | Idempotent via `dedupe_key` constraint in repository |
|
||||
| S11 | TODO | Unit tests for signal generation | Flag logic, explain hash, dedupe key |
|
||||
| S12 | TODO | Integration tests for signal flow | End-to-end tenant-scoped signal emission |
|
||||
| S13 | TODO | Add Prometheus metrics for signals | `epss_signals_emitted_total{event_type, tenant_id}` |
|
||||
@@ -175,15 +177,36 @@ concelier:
|
||||
|
||||
---
|
||||
|
||||
## Execution Log
|
||||
|
||||
| Date (UTC) | Update | Owner |
|
||||
|------------|--------|-------|
|
||||
| 2025-12-18 | Task #1: Implemented `EpssEnrichmentJob` with batch processing, priority band calculation, and trigger mechanism | Agent |
|
||||
| 2025-12-18 | R5-R6: Implemented `IEpssRawRepository` and `PostgresEpssRawRepository` for raw payload storage | Agent |
|
||||
| 2025-12-18 | S2-S3: Implemented `IEpssSignalRepository` and `PostgresEpssSignalRepository` with tenant config support | Agent |
|
||||
| 2025-12-18 | Registered new repositories in DI: `EpssRawRepository`, `EpssSignalRepository` | Agent |
|
||||
| 2025-12-18 | Task #2: Created 014_epss_triage_columns.sql migration with EPSS columns and batch_update_epss_triage() function | Agent |
|
||||
| 2025-12-18 | R2: Updated EpssIngestJob with StoreRawPayloadAsync() to store raw JSONB payload | Agent |
|
||||
| 2025-12-18 | S4: Created EpssExplainHashCalculator with ComputeExplainHash() and ComputeDedupeKey() | Agent |
|
||||
| 2025-12-18 | S5, S7, S8: Created EpssSignalJob with model version detection and MODEL_UPDATED event support | Agent |
|
||||
| 2025-12-18 | EPSS-SCAN-006: Created EpssEnrichmentStageExecutor for scan pipeline integration | Agent |
|
||||
| 2025-12-18 | R4: Created EpssReplayService with ReplayFromRawAsync() and ReplayRangeAsync() | Agent |
|
||||
| 2025-12-18 | S6: Created IObservedCveRepository, PostgresObservedCveRepository; integrated tenant-scoped filtering in EpssSignalJob | Agent |
|
||||
| 2025-12-18 | S9: Created IEpssSignalPublisher interface; integrated PublishBatchAsync() in EpssSignalJob | Agent |
|
||||
| 2025-12-18 | Task #4: Added GetChangesAsync() to IEpssRepository; EpssEnrichmentJob uses flag-based targeting | Agent |
|
||||
| 2025-12-18 | Task #6: Added PublishPriorityChangedAsync() to IEpssSignalPublisher; EpssEnrichmentJob emits events | Agent |
|
||||
|
||||
---
|
||||
|
||||
## Exit Criteria
|
||||
|
||||
- [ ] `EpssEnrichmentJob` updates vuln_instance_triage with current EPSS
|
||||
- [ ] Only instances with material changes are updated (flag-based targeting)
|
||||
- [ ] `vuln.priority.changed` event emitted only when band changes
|
||||
- [ ] Raw payload stored in `epss_raw` for replay capability
|
||||
- [ ] Signals emitted only for observed CVEs per tenant
|
||||
- [ ] Model version changes suppress noisy delta signals
|
||||
- [ ] Each signal has deterministic `explain_hash`
|
||||
- [x] `EpssEnrichmentJob` updates vuln_instance_triage with current EPSS
|
||||
- [x] Only instances with material changes are updated (flag-based targeting)
|
||||
- [x] `vuln.priority.changed` event emitted only when band changes
|
||||
- [x] Raw payload stored in `epss_raw` for replay capability
|
||||
- [x] Signals emitted only for observed CVEs per tenant
|
||||
- [x] Model version changes suppress noisy delta signals
|
||||
- [x] Each signal has deterministic `explain_hash`
|
||||
- [ ] All unit and integration tests pass
|
||||
- [ ] Documentation updated
|
||||
|
||||
@@ -195,17 +218,29 @@ concelier:
|
||||
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations/011_epss_raw_layer.sql`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations/012_epss_signal_layer.sql`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Services/EpssSignalJob.cs`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Services/EpssExplainHashCalculator.cs`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Repositories/IEpssSignalRepository.cs`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Repositories/PostgresEpssSignalRepository.cs`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Repositories/IEpssRawRepository.cs`
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Repositories/PostgresEpssRawRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations/014_epss_triage_columns.sql`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/IEpssSignalRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/IEpssRawRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/IObservedCveRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/PostgresEpssSignalRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/PostgresEpssRawRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/PostgresObservedCveRepository.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/EpssReplayService.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/IEpssSignalPublisher.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/CachingEpssProvider.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Epss/EpssExplainHashCalculator.cs`
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/EpssServiceCollectionExtensions.cs`
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Processing/EpssEnrichmentJob.cs`
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Processing/EpssEnrichmentStageExecutor.cs`
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Processing/EpssSignalJob.cs`
|
||||
|
||||
### Existing Files to Update
|
||||
### Existing Files Updated
|
||||
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Jobs/EpssIngestJob.cs` - Store raw payload
|
||||
- `src/Concelier/__Libraries/StellaOps.Concelier.Epss/Jobs/EpssEnrichmentJob.cs` - Add model version detection
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs` - Added EPSS repository registrations
|
||||
- `src/Scanner/__Libraries/StellaOps.Scanner.Storage/Postgres/Migrations/MigrationIds.cs` - Added new migration IDs
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs` - Added EpssEnrichment stage
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Processing/EpssIngestJob.cs` - Added raw payload storage
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/Program.cs` - Registered EpssEnrichmentStageExecutor
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -60,9 +60,9 @@ public sealed record NativeBinaryMetadata {
|
||||
| 2 | BSE-002 | DONE | Create NativeComponentEmitter |
|
||||
| 3 | BSE-003 | DONE | Create NativePurlBuilder |
|
||||
| 4 | BSE-004 | DONE | Create NativeComponentMapper (layer fragment generation) |
|
||||
| 5 | BSE-005 | DONE | Add NativeBinaryMetadata (with Imports/Exports) |
|
||||
| 6 | BSE-006 | TODO | Update CycloneDxComposer |
|
||||
| 7 | BSE-007 | TODO | Add stellaops:binary.* properties |
|
||||
| 5 | BSE-005 | DONE | Add NativeBinaryMetadata (with Imports/Exports/PE/Mach-O fields) |
|
||||
| 6 | BSE-006 | DONE | Update CycloneDxComposer via LayerComponentMapping.ToFragment() |
|
||||
| 7 | BSE-007 | DONE | Add stellaops:binary.* properties in ToComponentRecord() |
|
||||
| 8 | BSE-008 | DONE | Unit tests (22 tests passing) |
|
||||
| 9 | BSE-009 | TODO | Integration tests |
|
||||
|
||||
|
||||
@@ -48,8 +48,30 @@ Extend the Unknowns registry with native binary-specific classification reasons,
|
||||
| 1 | NUC-001 | DONE | Add UnknownKind enum values (MissingBuildId, UnknownBuildId, UnresolvedNativeLibrary, HeuristicDependency, UnsupportedBinaryFormat) |
|
||||
| 2 | NUC-002 | DONE | Create NativeUnknownContext model |
|
||||
| 3 | NUC-003 | DONE | Create NativeUnknownClassifier service |
|
||||
| 4 | NUC-004 | TODO | Integration with native analyzer |
|
||||
| 5 | NUC-005 | TODO | Unit tests |
|
||||
| 4 | NUC-003A | TODO | Approve + add `StellaOps.Unknowns.Core` reference from `src/Scanner/StellaOps.Scanner.Worker` (avoid circular deps; document final dependency direction) |
|
||||
| 5 | NUC-003B | TODO | Wire native analyzer outputs to Unknowns: call `NativeUnknownClassifier` and persist via Unknowns repository/service from scan pipeline |
|
||||
| 6 | NUC-004 | BLOCKED | Integrate with native analyzer (BLOCKED on NUC-003A/NUC-003B) |
|
||||
| 7 | NUC-005 | TODO | Unit tests |
|
||||
|
||||
---
|
||||
|
||||
## Unblock Task Notes (NUC-003A/NUC-003B)
|
||||
|
||||
### NUC-003A: Project reference + dependency direction
|
||||
- **Goal:** make the integration unambiguous: Scanner Worker emits Unknowns during scan; Unknowns.Core provides the domain + classifier.
|
||||
- **Touchpoints (expected):**
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj` (add project reference)
|
||||
- If persistence from Worker is required, also reference `src/Unknowns/__Libraries/StellaOps.Unknowns.Storage.Postgres/` and ensure migrations are applied by Scanner startup.
|
||||
- **Acceptance criteria (minimum):**
|
||||
- `dotnet build src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj` succeeds with no circular references.
|
||||
|
||||
### NUC-003B: Wiring from native analyzer to Unknowns
|
||||
- **Goal:** convert analyzer-side identification/resolution gaps into first-class Unknowns records.
|
||||
- **Touchpoints (expected):**
|
||||
- `src/Scanner/StellaOps.Scanner.Analyzers.Native/` (where classification context is produced)
|
||||
- `src/Scanner/StellaOps.Scanner.Worker/` (where results are persisted/emitted)
|
||||
- **Acceptance criteria (minimum):**
|
||||
- A missing build-id produces `UnknownKind.MissingBuildId` with a populated `NativeUnknownContext` and is visible via existing Unknowns API surfaces.
|
||||
|
||||
---
|
||||
|
||||
@@ -58,3 +80,11 @@ Extend the Unknowns registry with native binary-specific classification reasons,
|
||||
- [ ] Binaries without build-id create MissingBuildId unknowns
|
||||
- [ ] Build-IDs not in index create UnknownBuildId unknowns
|
||||
- [ ] Unknowns emit to registry, not core SBOM
|
||||
|
||||
---
|
||||
|
||||
## Execution Log
|
||||
|
||||
| Date (UTC) | Update | Owner |
|
||||
| --- | --- | --- |
|
||||
| 2025-12-18 | Added unblock tasks NUC-003A/NUC-003B; NUC-004 remains BLOCKED until dependency direction + wiring are implemented. | Project Mgmt |
|
||||
|
||||
@@ -789,12 +789,12 @@ public sealed class DriftSarifGenerator
|
||||
|---|---------|--------|-------------|-------|
|
||||
| 1 | UI-001 | DONE | Create PathNode TypeScript interface | `path-viewer.models.ts` |
|
||||
| 2 | UI-002 | DONE | Create CompressedPath TypeScript interface | `path-viewer.models.ts` |
|
||||
| 3 | UI-003 | TODO | Create PathViewerComponent | Core visualization |
|
||||
| 4 | UI-004 | TODO | Style PathViewerComponent | SCSS styling |
|
||||
| 3 | UI-003 | DONE | Create PathViewerComponent | `components/path-viewer/` |
|
||||
| 4 | UI-004 | DONE | Style PathViewerComponent | SCSS with BEM |
|
||||
| 5 | UI-005 | DONE | Create DriftedSink TypeScript interface | `drift.models.ts` |
|
||||
| 6 | UI-006 | DONE | Create DriftResult TypeScript interface | `drift.models.ts` |
|
||||
| 7 | UI-007 | TODO | Create RiskDriftCardComponent | Summary card |
|
||||
| 8 | UI-008 | TODO | Style RiskDriftCardComponent | SCSS styling |
|
||||
| 7 | UI-007 | DONE | Create RiskDriftCardComponent | `components/risk-drift-card/` |
|
||||
| 8 | UI-008 | DONE | Style RiskDriftCardComponent | SCSS with BEM |
|
||||
| 9 | UI-009 | DONE | Create drift API service | `drift-api.service.ts` |
|
||||
| 10 | UI-010 | TODO | Integrate PathViewer into scan details | Page integration |
|
||||
| 11 | UI-011 | TODO | Integrate RiskDriftCard into PR view | Page integration |
|
||||
@@ -805,12 +805,12 @@ public sealed class DriftSarifGenerator
|
||||
| 16 | UI-016 | TODO | Implement drift attestation service | DSSE signing |
|
||||
| 17 | UI-017 | TODO | Add attestation to drift API | API integration |
|
||||
| 18 | UI-018 | TODO | Unit tests for attestation | Predicate validation |
|
||||
| 19 | UI-019 | TODO | Create DriftCommand for CLI | CLI command |
|
||||
| 20 | UI-020 | TODO | Implement table output | Spectre.Console |
|
||||
| 21 | UI-021 | TODO | Implement JSON output | JSON serialization |
|
||||
| 22 | UI-022 | TODO | Create DriftSarifGenerator | SARIF 2.1.0 |
|
||||
| 23 | UI-023 | TODO | Implement SARIF output for CLI | CLI integration |
|
||||
| 24 | UI-024 | TODO | Update CLI documentation | docs/cli/ |
|
||||
| 19 | UI-019 | DONE | Create DriftCommand for CLI | `Commands/DriftCommandGroup.cs` |
|
||||
| 20 | UI-020 | DONE | Implement table output | Spectre.Console tables |
|
||||
| 21 | UI-021 | DONE | Implement JSON output | JSON serialization |
|
||||
| 22 | UI-022 | DONE | Create DriftSarifGenerator | SARIF 2.1.0 (placeholder) |
|
||||
| 23 | UI-023 | DONE | Implement SARIF output for CLI | `CommandHandlers.Drift.cs` |
|
||||
| 24 | UI-024 | DONE | Update CLI documentation | `docs/cli/drift-cli.md` |
|
||||
| 25 | UI-025 | TODO | Integration tests for CLI | End-to-end |
|
||||
|
||||
---
|
||||
|
||||
@@ -334,20 +334,20 @@ cas://reachability/graphs/{blake3:hash}/
|
||||
|---|---------|--------|-------------|
|
||||
| 1 | RWD-001 | DONE | Create ReachabilityWitnessStatement.cs |
|
||||
| 2 | RWD-002 | DONE | Create ReachabilityWitnessOptions.cs |
|
||||
| 3 | RWD-003 | TODO | Add PredicateTypes.StellaOpsReachabilityWitness |
|
||||
| 3 | RWD-003 | DONE | Add PredicateTypes.StellaOpsReachabilityWitness |
|
||||
| 4 | RWD-004 | DONE | Create ReachabilityWitnessDsseBuilder.cs |
|
||||
| 5 | RWD-005 | DONE | Create IReachabilityWitnessPublisher.cs |
|
||||
| 6 | RWD-006 | DONE | Create ReachabilityWitnessPublisher.cs |
|
||||
| 7 | RWD-007 | TODO | Implement CAS storage integration (placeholder done) |
|
||||
| 8 | RWD-008 | TODO | Implement Rekor submission (placeholder done) |
|
||||
| 9 | RWD-009 | TODO | Integrate with RichGraphWriter |
|
||||
| 10 | RWD-010 | TODO | Add service registration |
|
||||
| 9 | RWD-009 | DONE | Integrate with RichGraphWriter (AttestingRichGraphWriter) |
|
||||
| 10 | RWD-010 | DONE | Add service registration |
|
||||
| 11 | RWD-011 | DONE | Unit tests for DSSE builder (15 tests) |
|
||||
| 12 | RWD-012 | TODO | Unit tests for publisher |
|
||||
| 12 | RWD-012 | DONE | Unit tests for publisher (8 tests) |
|
||||
| 13 | RWD-013 | TODO | Integration tests with Attestor |
|
||||
| 14 | RWD-014 | TODO | Add golden fixture: graph-only.golden.json |
|
||||
| 15 | RWD-015 | TODO | Add golden fixture: graph-with-runtime.golden.json |
|
||||
| 16 | RWD-016 | TODO | Verify deterministic DSSE output |
|
||||
| 14 | RWD-014 | DONE | Add golden fixture: graph-only.golden.json |
|
||||
| 15 | RWD-015 | DONE | Add golden fixture: graph-with-runtime.golden.json |
|
||||
| 16 | RWD-016 | DONE | Verify deterministic DSSE output (4 tests) |
|
||||
|
||||
---
|
||||
|
||||
@@ -356,6 +356,9 @@ cas://reachability/graphs/{blake3:hash}/
|
||||
| Date | Update | Owner |
|
||||
|------|--------|-------|
|
||||
| 2025-12-18 | Created ReachabilityWitnessStatement, ReachabilityWitnessOptions, ReachabilityWitnessDsseBuilder, IReachabilityWitnessPublisher, ReachabilityWitnessPublisher. Created 15 DSSE builder tests. 6/16 tasks DONE. | Agent |
|
||||
| 2025-12-18 | Added PredicateTypes.StellaOpsReachabilityWitness to Signer.Core. Created ReachabilityAttestationServiceCollectionExtensions.cs for DI. Created ReachabilityWitnessPublisherTests.cs (8 tests). 9/16 tasks DONE. | Agent |
|
||||
| 2025-12-18 | Fixed PathExplanationServiceTests.cs (RichGraph/RichGraphEdge constructor updates). Fixed RichGraphWriterTests.cs assertion. All 119 tests pass. | Agent |
|
||||
| 2025-12-18 | Created AttestingRichGraphWriter.cs for integrated attestation. Created golden fixtures. Created AttestingRichGraphWriterTests.cs (4 tests). 13/16 tasks DONE. All 123 tests pass. | Agent |
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# SPRINT_3700_0001_0001 - Witness Foundation
|
||||
|
||||
**Status:** BLOCKED (2 tasks pending integration: WIT-008, WIT-009)
|
||||
**Status:** BLOCKED (WIT-008 blocked on WIT-007A/WIT-007B; WIT-009 blocked on WIT-007C/WIT-007D)
|
||||
**Priority:** P0 - CRITICAL
|
||||
**Module:** Scanner, Attestor
|
||||
**Working Directory:** `src/Scanner/__Libraries/StellaOps.Scanner.Reachability/`
|
||||
@@ -46,14 +46,38 @@ Before starting, read:
|
||||
| 5 | WIT-005 | DONE | Create PathWitness record model |
|
||||
| 6 | WIT-006 | DONE | Create IPathWitnessBuilder interface |
|
||||
| 7 | WIT-007 | DONE | Implement PathWitnessBuilder service |
|
||||
| 8 | WIT-008 | BLOCKED | Integrate with ReachabilityAnalyzer output - requires ReachabilityAnalyzer refactoring |
|
||||
| 9 | WIT-009 | BLOCKED | Add DSSE envelope generation - requires Attestor service integration |
|
||||
| 10 | WIT-010 | DONE | Create WitnessEndpoints.cs (GET /witness/{id}, list, verify) |
|
||||
| 11 | WIT-011 | DONE | Create 013_witness_storage.sql migration |
|
||||
| 12 | WIT-012 | DONE | Create PostgresWitnessRepository + IWitnessRepository |
|
||||
| 13 | WIT-013 | DONE | Add UsesBlake3HashForDefaultProfile test to RichGraphWriterTests |
|
||||
| 14 | WIT-014 | DONE | Add PathWitnessBuilderTests |
|
||||
| 15 | WIT-015 | DONE | Create docs/contracts/witness-v1.md |
|
||||
| 8 | WIT-007A | TODO | Define ReachabilityAnalyzer → PathWitnessBuilder output contract (types, ordering, limits, fixtures) |
|
||||
| 9 | WIT-007B | TODO | Refactor ReachabilityAnalyzer to surface deterministic paths to sinks (enables witness generation) |
|
||||
| 10 | WIT-007C | TODO | Define witness predicate + DSSE payloadType constants (Attestor) and align `docs/contracts/witness-v1.md` |
|
||||
| 11 | WIT-007D | TODO | Implement DSSE sign+verify for witness payload using `StellaOps.Attestor.Envelope`; add golden fixtures |
|
||||
| 12 | WIT-008 | BLOCKED | Integrate witness generation with ReachabilityAnalyzer output (BLOCKED on WIT-007A, WIT-007B) |
|
||||
| 13 | WIT-009 | BLOCKED | Add DSSE envelope generation (BLOCKED on WIT-007C, WIT-007D) |
|
||||
| 14 | WIT-010 | DONE | Create WitnessEndpoints.cs (GET /witness/{id}, list, verify) |
|
||||
| 15 | WIT-011 | DONE | Create 013_witness_storage.sql migration |
|
||||
| 16 | WIT-012 | DONE | Create PostgresWitnessRepository + IWitnessRepository |
|
||||
| 17 | WIT-013 | DONE | Add UsesBlake3HashForDefaultProfile test to RichGraphWriterTests |
|
||||
| 18 | WIT-014 | DONE | Add PathWitnessBuilderTests |
|
||||
| 19 | WIT-015 | DONE | Create docs/contracts/witness-v1.md |
|
||||
|
||||
---
|
||||
|
||||
## Unblock Task Notes (WIT-007A..WIT-007D)
|
||||
|
||||
### WIT-007A: ReachabilityAnalyzer → witness output contract
|
||||
- **Goal:** define the exact path output shape (entrypoint → sink), including stable ordering and caps (max depth/path count) so witness generation is deterministic.
|
||||
- **Touchpoints (expected):** `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph/Analysis/ReachabilityAnalyzer.cs` and `src/Scanner/__Tests/StellaOps.Scanner.CallGraph.Tests/` (fixtures + determinism assertions).
|
||||
- **Evidence:** fixture graphs + expected path lists committed and validated by tests.
|
||||
|
||||
### WIT-007B: ReachabilityAnalyzer refactor (sink-aware + path export)
|
||||
- **Acceptance criteria (minimum):** analyzer accepts explicit sinks and returns deterministic path(s) per reachable sink without breaking existing tests/behaviour.
|
||||
|
||||
### WIT-007C: Witness predicate + DSSE payloadType constants
|
||||
- **Goal:** remove ambiguity about predicate URI/media type; Scanner/Attestor must sign/verify the same bytes.
|
||||
- **Touchpoints (expected):** `src/Attestor/StellaOps.Attestor/Predicates/` and `docs/contracts/witness-v1.md`.
|
||||
|
||||
### WIT-007D: DSSE signing + verification for witnesses
|
||||
- **Preferred implementation:** use `src/Attestor/StellaOps.Attestor.Envelope/` (serializer + `EnvelopeSignatureService`) for Ed25519 first.
|
||||
- **Evidence:** golden fixture payload + DSSE envelope + public key, plus unit tests proving deterministic serialization and successful verification.
|
||||
|
||||
---
|
||||
|
||||
@@ -345,7 +369,7 @@ public static class WitnessPredicates
|
||||
- [x] All existing RichGraph tests pass
|
||||
- [x] PathWitness model serializes correctly
|
||||
- [x] PathWitnessBuilder generates valid witnesses
|
||||
- [ ] DSSE signatures verify correctly (BLOCKED: WIT-009)
|
||||
- [ ] DSSE signatures verify correctly (BLOCKED: WIT-009; blocked on WIT-007C/WIT-007D)
|
||||
- [x] `/witness/{id}` endpoint returns witness JSON
|
||||
- [x] Documentation complete
|
||||
|
||||
@@ -358,8 +382,8 @@ public static class WitnessPredicates
|
||||
| WIT-DEC-001 | Use Blake3.NET library | Well-tested, MIT license |
|
||||
| WIT-DEC-002 | Store witnesses in Postgres JSONB | Flexible queries, no separate store |
|
||||
| WIT-DEC-003 | Ed25519 signatures only | Simplicity, Ed25519 is default for DSSE |
|
||||
| WIT-DEC-004 | Defer ReachabilityAnalyzer integration | Requires understanding of call flow; new sprint needed |
|
||||
| WIT-DEC-005 | Defer DSSE signing to Attestor sprint | DSSE signing belongs in Attestor module |
|
||||
| WIT-DEC-004 | Convert ReachabilityAnalyzer blocker into explicit tasks | Track contract+refactor as WIT-007A/WIT-007B; keep WIT-008 BLOCKED until complete |
|
||||
| WIT-DEC-005 | Convert DSSE signing blocker into explicit tasks | Track predicate+sign/verify as WIT-007C/WIT-007D; keep WIT-009 BLOCKED until complete |
|
||||
|
||||
| Risk | Likelihood | Impact | Mitigation |
|
||||
|------|------------|--------|------------|
|
||||
@@ -381,3 +405,4 @@ public static class WitnessPredicates
|
||||
| 2025-12-18 | Completed WIT-010: Created WitnessEndpoints.cs with GET /witnesses/{id}, list (by scan/cve/graphHash), by-hash, verify endpoints | Agent |
|
||||
| 2025-12-18 | Registered MapWitnessEndpoints() in Scanner.WebService Program.cs | Agent |
|
||||
| 2025-12-18 | Completed WIT-013: Added UsesBlake3HashForDefaultProfile test to RichGraphWriterTests.cs | Agent |
|
||||
| 2025-12-18 | Added unblock tasks WIT-007A..WIT-007D and updated WIT-008/WIT-009 dependencies accordingly. | Project Mgmt |
|
||||
|
||||
@@ -101,17 +101,17 @@ Before starting, read:
|
||||
| 11 | SURF-011 | TODO | Implement PythonAstFingerprinter |
|
||||
| 12 | SURF-012 | TODO | Create MethodKey normalizer per ecosystem |
|
||||
| 13 | SURF-013 | DONE | Create MethodDiffEngine service |
|
||||
| 14 | SURF-014 | TODO | Create 011_vuln_surfaces.sql migration |
|
||||
| 14 | SURF-014 | DONE | Create 014_vuln_surfaces.sql migration |
|
||||
| 15 | SURF-015 | DONE | Create VulnSurface, VulnSurfaceSink models |
|
||||
| 16 | SURF-016 | TODO | Create PostgresVulnSurfaceRepository |
|
||||
| 16 | SURF-016 | DONE | Create PostgresVulnSurfaceRepository |
|
||||
| 17 | SURF-017 | DONE | Create VulnSurfaceBuilder orchestrator service |
|
||||
| 18 | SURF-018 | DONE | Create IVulnSurfaceBuilder interface |
|
||||
| 19 | SURF-019 | TODO | Add surface builder metrics |
|
||||
| 20 | SURF-020 | TODO | Create NuGetDownloaderTests |
|
||||
| 21 | SURF-021 | TODO | Create CecilFingerprinterTests |
|
||||
| 22 | SURF-022 | TODO | Create MethodDiffEngineTests |
|
||||
| 19 | SURF-019 | DONE | Add surface builder metrics |
|
||||
| 20 | SURF-020 | DONE | Create NuGetDownloaderTests (9 tests) |
|
||||
| 21 | SURF-021 | DONE | Create CecilFingerprinterTests (7 tests) |
|
||||
| 22 | SURF-022 | DONE | Create MethodDiffEngineTests (8 tests) |
|
||||
| 23 | SURF-023 | TODO | Integration test with real CVE (Newtonsoft.Json) |
|
||||
| 24 | SURF-024 | TODO | Create docs/contracts/vuln-surface-v1.md |
|
||||
| 24 | SURF-024 | DONE | Create docs/contracts/vuln-surface-v1.md |
|
||||
|
||||
---
|
||||
|
||||
@@ -447,3 +447,6 @@ Expected Changed Methods:
|
||||
| Date (UTC) | Update | Owner |
|
||||
|---|---|---|
|
||||
| 2025-12-18 | Created sprint from advisory analysis | Agent |
|
||||
| 2025-12-18 | Created CecilMethodFingerprinterTests.cs (7 tests) and MethodDiffEngineTests.cs (8 tests). 12/24 tasks DONE. All 26 VulnSurfaces tests pass. | Agent |
|
||||
| 2025-12-18 | Created NuGetPackageDownloaderTests.cs (9 tests). Fixed IVulnSurfaceRepository interface/implementation mismatch. Added missing properties to VulnSurfaceSink model. 19/24 tasks DONE. All 35 VulnSurfaces tests pass. | Agent |
|
||||
| 2025-12-18 | Created VulnSurfaceMetrics.cs with counters, histograms, and gauges. Integrated metrics into VulnSurfaceBuilder. 20/24 tasks DONE. | Agent |
|
||||
320
src/Cli/StellaOps.Cli/Commands/CommandHandlers.Drift.cs
Normal file
320
src/Cli/StellaOps.Cli/Commands/CommandHandlers.Drift.cs
Normal file
@@ -0,0 +1,320 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CommandHandlers.Drift.cs
|
||||
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
// Tasks: UI-019, UI-020, UI-021
|
||||
// Description: Command handlers for reachability drift CLI.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using Spectre.Console;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
internal static partial class CommandHandlers
|
||||
{
|
||||
// Shared serializer settings for all drift JSON output:
// indented for human readability, camelCase to match the drift API wire format.
private static readonly JsonSerializerOptions DriftJsonOptions = new()
{
    WriteIndented = true,
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
|
||||
|
||||
/// <summary>
/// Handler for the `drift compare` command (UI-019/UI-020/UI-021).
/// Compares reachability between a base and head scan/graph and renders the
/// result as a table (default), JSON, or SARIF.
/// </summary>
/// <param name="services">DI container; will resolve the drift service once the API exists.</param>
/// <param name="baseId">Base scan/graph ID or commit SHA.</param>
/// <param name="headId">Head scan/graph ID or commit SHA; <c>null</c> means "latest".</param>
/// <param name="image">Optional container image reference filter (currently unused placeholder).</param>
/// <param name="repo">Optional repository filter (currently unused placeholder).</param>
/// <param name="output">Output format: "table" (default), "json", or "sarif"; matched case-insensitively.</param>
/// <param name="minSeverity">Minimum severity to include in table output.</param>
/// <param name="onlyIncreases">When true, table output shows only sinks whose reachability increased.</param>
/// <param name="verbose">When true, prints diagnostic progress lines.</param>
/// <param name="cancellationToken">Cancellation token forwarded to async writers.</param>
internal static async Task HandleDriftCompareAsync(
    IServiceProvider services,
    string baseId,
    string? headId,
    string? image,
    string? repo,
    string output,
    string minSeverity,
    bool onlyIncreases,
    bool verbose,
    CancellationToken cancellationToken)
{
    // TODO: Replace with actual service call when drift API is available.
    var console = AnsiConsole.Console;

    if (verbose)
    {
        // Escape user-supplied IDs so '[' / ']' cannot break (or inject)
        // Spectre.Console markup.
        console.MarkupLine(
            $"[dim]Comparing drift: base={Markup.Escape(baseId)}, head={Markup.Escape(headId ?? "(latest)")}[/]");
    }

    // Placeholder result: zeroed summary, no drifted sinks. The real
    // implementation will populate this from the drift service.
    var driftResult = new DriftResultDto
    {
        Id = Guid.NewGuid().ToString("N")[..8],
        ComparedAt = DateTimeOffset.UtcNow.ToString("O"),
        BaseGraphId = baseId,
        HeadGraphId = headId ?? "latest",
        Summary = new DriftSummaryDto
        {
            TotalSinks = 0,
            IncreasedReachability = 0,
            DecreasedReachability = 0,
            UnchangedReachability = 0,
            NewSinks = 0,
            RemovedSinks = 0,
            RiskTrend = "stable",
            NetRiskDelta = 0
        },
        DriftedSinks = Array.Empty<DriftedSinkDto>()
    };

    // Accept the output format case-insensitively ("JSON" == "json");
    // anything unrecognized falls back to the human-readable table.
    switch (output.ToLowerInvariant())
    {
        case "json":
            await WriteJsonOutputAsync(console, driftResult, cancellationToken);
            break;
        case "sarif":
            await WriteSarifOutputAsync(console, driftResult, cancellationToken);
            break;
        default:
            WriteTableOutput(console, driftResult, onlyIncreases, minSeverity);
            break;
    }
}
|
||||
|
||||
/// <summary>
/// Handler for the `drift show` command: displays a previously computed drift
/// result by ID as a table (default), JSON, or SARIF.
/// </summary>
/// <param name="services">DI container; will resolve the drift service once the API exists.</param>
/// <param name="id">Identifier of the stored drift result to display.</param>
/// <param name="output">Output format: "table" (default), "json", or "sarif"; matched case-insensitively.</param>
/// <param name="expandPaths">When true, expands full call paths (currently unused placeholder).</param>
/// <param name="verbose">When true, prints diagnostic progress lines.</param>
/// <param name="cancellationToken">Cancellation token forwarded to async writers.</param>
internal static async Task HandleDriftShowAsync(
    IServiceProvider services,
    string id,
    string output,
    bool expandPaths,
    bool verbose,
    CancellationToken cancellationToken)
{
    var console = AnsiConsole.Console;

    if (verbose)
    {
        // Escape the user-supplied ID so '[' / ']' cannot break (or inject)
        // Spectre.Console markup.
        console.MarkupLine($"[dim]Showing drift result: {Markup.Escape(id)}[/]");
    }

    // Placeholder result: zeroed summary, no drifted sinks. The real
    // implementation will load the stored result from the drift service.
    var driftResult = new DriftResultDto
    {
        Id = id,
        ComparedAt = DateTimeOffset.UtcNow.ToString("O"),
        BaseGraphId = "base",
        HeadGraphId = "head",
        Summary = new DriftSummaryDto
        {
            TotalSinks = 0,
            IncreasedReachability = 0,
            DecreasedReachability = 0,
            UnchangedReachability = 0,
            NewSinks = 0,
            RemovedSinks = 0,
            RiskTrend = "stable",
            NetRiskDelta = 0
        },
        DriftedSinks = Array.Empty<DriftedSinkDto>()
    };

    // Accept the output format case-insensitively; unrecognized values fall
    // back to the table view with no filters (show everything down to "info").
    switch (output.ToLowerInvariant())
    {
        case "json":
            await WriteJsonOutputAsync(console, driftResult, cancellationToken);
            break;
        case "sarif":
            await WriteSarifOutputAsync(console, driftResult, cancellationToken);
            break;
        default:
            WriteTableOutput(console, driftResult, false, "info");
            break;
    }
}
|
||||
|
||||
// Task: UI-020 - Table output using Spectre.Console
/// <summary>
/// Renders a drift result as a header panel, a summary table, and (when
/// present) a per-sink table filtered by severity and risk direction.
/// </summary>
private static void WriteTableOutput(
    IAnsiConsole console,
    DriftResultDto result,
    bool onlyIncreases,
    string minSeverity)
{
    // Header panel carrying the drift result id.
    console.Write(
        new Panel(new Markup($"[bold]Reachability Drift[/] [dim]({result.Id})[/]"))
            .Border(BoxBorder.Rounded)
            .Padding(1, 0));

    // Summary metrics.
    var summary = result.Summary;
    var metrics = new Table()
        .Border(TableBorder.Rounded)
        .AddColumn("Metric")
        .AddColumn("Value");

    metrics.AddRow("Trend", FormatTrend(summary.RiskTrend));
    metrics.AddRow("Net Risk Delta", FormatDelta(summary.NetRiskDelta));
    metrics.AddRow("Increased", summary.IncreasedReachability.ToString());
    metrics.AddRow("Decreased", summary.DecreasedReachability.ToString());
    metrics.AddRow("New Sinks", summary.NewSinks.ToString());
    metrics.AddRow("Removed Sinks", summary.RemovedSinks.ToString());

    console.Write(metrics);

    if (result.DriftedSinks.Length == 0)
    {
        console.MarkupLine("[green]No drifted sinks found.[/]");
        return;
    }

    // Lower rank == more severe. Unknown sink severities fall back to
    // "info" (rank 4); an unknown --min-severity falls back to "medium" (2).
    static int Rank(string severity, int fallback) => severity switch
    {
        "critical" => 0,
        "high" => 1,
        "medium" => 2,
        "low" => 3,
        "info" => 4,
        _ => fallback
    };

    var threshold = Rank(minSeverity, 2);

    var sinksTable = new Table()
        .Border(TableBorder.Rounded)
        .AddColumn("Severity")
        .AddColumn("Sink")
        .AddColumn("CVE")
        .AddColumn("Bucket Change")
        .AddColumn("Delta");

    foreach (var sink in result.DriftedSinks)
    {
        // Skip sinks below the requested severity floor.
        if (Rank(sink.Severity ?? "info", 4) > threshold)
        {
            continue;
        }

        // Optionally hide sinks whose reachability did not increase.
        if (onlyIncreases && !sink.IsRiskIncrease)
        {
            continue;
        }

        sinksTable.AddRow(
            FormatSeverity(sink.Severity),
            sink.SinkSymbol ?? "unknown",
            sink.CveId ?? "-",
            $"{sink.PreviousBucket ?? "N/A"} → {sink.CurrentBucket}",
            FormatDelta(sink.RiskDelta));
    }

    console.Write(sinksTable);
}
|
||||
|
||||
// Task: UI-021 - JSON output
/// <summary>
/// Serializes the drift result with <c>DriftJsonOptions</c> and writes it to
/// the console. The work is entirely synchronous, so a completed task is
/// returned directly instead of paying for an async state machine.
/// </summary>
/// <param name="console">Target console.</param>
/// <param name="result">Drift result to serialize.</param>
/// <param name="cancellationToken">Unused; kept for signature parity with other writers.</param>
private static Task WriteJsonOutputAsync(
    IAnsiConsole console,
    DriftResultDto result,
    CancellationToken cancellationToken)
{
    var json = JsonSerializer.Serialize(result, DriftJsonOptions);
    console.WriteLine(json);
    return Task.CompletedTask;
}
|
||||
|
||||
// Task: UI-022, UI-023 - SARIF output (placeholder)
/// <summary>
/// Writes the drift result as a minimal SARIF 2.1.0 log to the console.
/// </summary>
/// <param name="console">Target console.</param>
/// <param name="result">Drift result whose sinks become SARIF results.</param>
/// <param name="cancellationToken">Unused; kept for signature parity with other writers.</param>
private static Task WriteSarifOutputAsync(
    IAnsiConsole console,
    DriftResultDto result,
    CancellationToken cancellationToken)
{
    // TODO: Implement full SARIF 2.1.0 generation in DriftSarifGenerator
    var runs = new[]
    {
        new
        {
            tool = new
            {
                driver = new
                {
                    name = "StellaOps Drift",
                    version = "1.0.0",
                    informationUri = "https://stellaops.io/docs/drift"
                }
            },
            results = result.DriftedSinks.Select(sink => new
            {
                ruleId = sink.CveId ?? $"drift-{sink.SinkSymbol}",
                level = MapSeverityToSarif(sink.Severity),
                message = new
                {
                    text = $"Reachability changed: {sink.PreviousBucket ?? "N/A"} → {sink.CurrentBucket}"
                },
                locations = Array.Empty<object>()
            }).ToArray()
        }
    };

    // BUGFIX: SARIF 2.1.0 names its schema property "$schema". An anonymous
    // type member cannot contain '$' (it was serialized as "schema"), so the
    // top level is built as a dictionary to emit the spec-conformant key.
    // NOTE(review): assumes DriftJsonOptions does not set a DictionaryKeyPolicy
    // that would rewrite "$schema" — confirm against the options declaration.
    var sarif = new Dictionary<string, object>
    {
        ["$schema"] = "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
        ["version"] = "2.1.0",
        ["runs"] = runs
    };

    var json = JsonSerializer.Serialize(sarif, DriftJsonOptions);
    console.WriteLine(json);
    return Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Renders a risk-trend keyword ("increasing"/"decreasing"/anything else)
/// as coloured Spectre.Console markup with a direction arrow.
/// </summary>
private static string FormatTrend(string trend)
{
    if (trend == "increasing")
    {
        return "[red]↑ Increasing[/]";
    }

    if (trend == "decreasing")
    {
        return "[green]↓ Decreasing[/]";
    }

    // Any other value (including "stable") renders as the neutral trend.
    return "[dim]→ Stable[/]";
}
|
||||
|
||||
/// <summary>
/// Renders a signed risk delta as markup: positive deltas are red with an
/// explicit '+', negative deltas are green, zero is dimmed.
/// </summary>
private static string FormatDelta(int delta)
{
    if (delta > 0)
    {
        return $"[red]+{delta}[/]";
    }

    if (delta < 0)
    {
        // Negative values already carry their '-' sign.
        return $"[green]{delta}[/]";
    }

    return "[dim]0[/]";
}
|
||||
|
||||
/// <summary>
/// Renders a severity keyword as an inverted colour badge; null or unknown
/// severities render as the dimmed INFO badge.
/// </summary>
private static string FormatSeverity(string? severity)
{
    switch (severity)
    {
        case "critical":
            return "[white on red] CRITICAL [/]";
        case "high":
            return "[black on darkorange] HIGH [/]";
        case "medium":
            return "[black on yellow] MEDIUM [/]";
        case "low":
            return "[black on olive] LOW [/]";
        default:
            return "[dim] INFO [/]";
    }
}
|
||||
|
||||
/// <summary>
/// Maps an internal severity keyword to a SARIF result level:
/// critical/high → "error", medium → "warning", everything else → "note".
/// </summary>
private static string MapSeverityToSarif(string? severity)
{
    if (severity == "critical" || severity == "high")
    {
        return "error";
    }

    return severity == "medium" ? "warning" : "note";
}
|
||||
|
||||
// DTOs for drift output
/// <summary>
/// Root payload for a reachability drift comparison between two graphs,
/// as emitted by the table/JSON/SARIF writers in this file.
/// </summary>
private sealed record DriftResultDto
{
    /// <summary>Identifier of this drift result (8-char hex in the placeholder path).</summary>
    public string Id { get; init; } = string.Empty;

    /// <summary>Comparison timestamp, round-trip ("O") formatted.</summary>
    public string ComparedAt { get; init; } = string.Empty;

    /// <summary>Identifier of the base graph/scan.</summary>
    public string BaseGraphId { get; init; } = string.Empty;

    /// <summary>Identifier of the head graph/scan ("latest" when unspecified).</summary>
    public string HeadGraphId { get; init; } = string.Empty;

    /// <summary>Aggregate counters and trend for the comparison.</summary>
    public DriftSummaryDto Summary { get; init; } = new();

    /// <summary>Per-sink drift entries; empty when nothing drifted.</summary>
    public DriftedSinkDto[] DriftedSinks { get; init; } = Array.Empty<DriftedSinkDto>();
}
|
||||
|
||||
/// <summary>
/// Aggregate metrics for a drift comparison, rendered in the summary table.
/// </summary>
private sealed record DriftSummaryDto
{
    /// <summary>Total number of sinks considered.</summary>
    public int TotalSinks { get; init; }

    /// <summary>Sinks whose reachability increased between base and head.</summary>
    public int IncreasedReachability { get; init; }

    /// <summary>Sinks whose reachability decreased between base and head.</summary>
    public int DecreasedReachability { get; init; }

    /// <summary>Sinks whose reachability did not change.</summary>
    public int UnchangedReachability { get; init; }

    /// <summary>Sinks present only in the head graph.</summary>
    public int NewSinks { get; init; }

    /// <summary>Sinks present only in the base graph.</summary>
    public int RemovedSinks { get; init; }

    /// <summary>Overall trend keyword: "increasing", "decreasing", or "stable".</summary>
    public string RiskTrend { get; init; } = "stable";

    /// <summary>Net signed risk change across all sinks.</summary>
    public int NetRiskDelta { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single sink whose reachability bucket changed between base and head.
/// </summary>
private sealed record DriftedSinkDto
{
    /// <summary>Symbol of the vulnerable sink; rendered as "unknown" when null.</summary>
    public string? SinkSymbol { get; init; }

    /// <summary>Associated CVE identifier, if any; rendered as "-" when null.</summary>
    public string? CveId { get; init; }

    /// <summary>Severity keyword ("critical".."info"); null is treated as "info".</summary>
    public string? Severity { get; init; }

    /// <summary>Reachability bucket in the base graph; null means the sink is new.</summary>
    public string? PreviousBucket { get; init; }

    /// <summary>Reachability bucket in the head graph.</summary>
    public string CurrentBucket { get; init; } = string.Empty;

    /// <summary>True when the change raises risk; used by the --only-increases filter.</summary>
    public bool IsRiskIncrease { get; init; }

    /// <summary>Signed risk change contributed by this sink.</summary>
    public int RiskDelta { get; init; }
}
|
||||
}
|
||||
160
src/Cli/StellaOps.Cli/Commands/DriftCommandGroup.cs
Normal file
160
src/Cli/StellaOps.Cli/Commands/DriftCommandGroup.cs
Normal file
@@ -0,0 +1,160 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// DriftCommandGroup.cs
|
||||
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
// Task: UI-019
|
||||
// Description: CLI command group for reachability drift detection.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.CommandLine;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Cli.Extensions;
|
||||
using Spectre.Console;
|
||||
|
||||
namespace StellaOps.Cli.Commands;
|
||||
|
||||
/// <summary>
/// CLI command group for reachability drift detection.
/// Wires the `drift compare` and `drift show` subcommands and forwards
/// parsed values to <c>CommandHandlers</c>.
/// </summary>
internal static class DriftCommandGroup
{
    /// <summary>
    /// Builds the parent `drift` command with its subcommands attached.
    /// </summary>
    /// <param name="services">DI container forwarded to the handlers.</param>
    /// <param name="verboseOption">Shared global --verbose option, reused on each subcommand.</param>
    /// <param name="cancellationToken">Token forwarded to the handlers.</param>
    internal static Command BuildDriftCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var drift = new Command("drift", "Reachability drift detection operations.");

        drift.Add(BuildDriftCompareCommand(services, verboseOption, cancellationToken));
        drift.Add(BuildDriftShowCommand(services, verboseOption, cancellationToken));

        return drift;
    }

    // Builds `drift compare`: compares reachability between two scans/graphs.
    private static Command BuildDriftCompareCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var baseOption = new Option<string>("--base", new[] { "-b" })
        {
            Description = "Base scan/graph ID or commit SHA for comparison.",
            Required = true
        };

        // NOTE(review): "-h" typically collides with the built-in help alias in
        // System.CommandLine — confirm help stays reachable on this command.
        var headOption = new Option<string>("--head", new[] { "-h" })
        {
            Description = "Head scan/graph ID or commit SHA for comparison (defaults to latest)."
        };

        var imageOption = new Option<string?>("--image", new[] { "-i" })
        {
            Description = "Container image reference (digest or tag)."
        };

        var repoOption = new Option<string?>("--repo", new[] { "-r" })
        {
            Description = "Repository reference (owner/repo)."
        };

        // NOTE(review): the SetDefaultValue(...).FromAmong(...) chain presumably
        // relies on extensions from StellaOps.Cli.Extensions returning the option;
        // verify against that extension class.
        var outputOption = new Option<string>("--output", new[] { "-o" })
        {
            Description = "Output format: table (default), json, sarif."
        }.SetDefaultValue("table").FromAmong("table", "json", "sarif");

        var severityOption = new Option<string>("--min-severity")
        {
            Description = "Minimum severity to include: critical, high, medium, low, info."
        }.SetDefaultValue("medium").FromAmong("critical", "high", "medium", "low", "info");

        var onlyIncreasesOption = new Option<bool>("--only-increases")
        {
            Description = "Only show sinks with increased reachability (risk increases)."
        };

        var command = new Command("compare", "Compare reachability between two scans.")
        {
            baseOption,
            headOption,
            imageOption,
            repoOption,
            outputOption,
            severityOption,
            onlyIncreasesOption,
            verboseOption
        };

        // Extract parsed values and delegate to the shared handler.
        command.SetAction(parseResult =>
        {
            var baseId = parseResult.GetValue(baseOption)!;
            var headId = parseResult.GetValue(headOption);
            var image = parseResult.GetValue(imageOption);
            var repo = parseResult.GetValue(repoOption);
            var output = parseResult.GetValue(outputOption)!;
            var minSeverity = parseResult.GetValue(severityOption)!;
            var onlyIncreases = parseResult.GetValue(onlyIncreasesOption);
            var verbose = parseResult.GetValue(verboseOption);

            return CommandHandlers.HandleDriftCompareAsync(
                services,
                baseId,
                headId,
                image,
                repo,
                output,
                minSeverity,
                onlyIncreases,
                verbose,
                cancellationToken);
        });

        return command;
    }

    // Builds `drift show`: displays a previously computed drift result by id.
    private static Command BuildDriftShowCommand(
        IServiceProvider services,
        Option<bool> verboseOption,
        CancellationToken cancellationToken)
    {
        var idOption = new Option<string>("--id")
        {
            Description = "Drift result ID to display.",
            Required = true
        };

        var outputOption = new Option<string>("--output", new[] { "-o" })
        {
            Description = "Output format: table (default), json, sarif."
        }.SetDefaultValue("table").FromAmong("table", "json", "sarif");

        var expandPathsOption = new Option<bool>("--expand-paths")
        {
            Description = "Show full call paths instead of compressed view."
        };

        var command = new Command("show", "Show details of a drift result.")
        {
            idOption,
            outputOption,
            expandPathsOption,
            verboseOption
        };

        command.SetAction(parseResult =>
        {
            var id = parseResult.GetValue(idOption)!;
            var output = parseResult.GetValue(outputOption)!;
            var expandPaths = parseResult.GetValue(expandPathsOption);
            var verbose = parseResult.GetValue(verboseOption);

            return CommandHandlers.HandleDriftShowAsync(
                services,
                id,
                output,
                expandPaths,
                verbose,
                cancellationToken);
        });

        return command;
    }
}
|
||||
@@ -0,0 +1,384 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssEnrichmentJob.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: Task #1 - Implement EpssEnrichmentJob service
|
||||
// Description: Background job that enriches vulnerability instances with current EPSS scores.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Options for the EPSS enrichment job.
/// Bound from the "Epss:Enrichment" configuration section; all thresholds are
/// compared inclusively (>=) by the consumers in this file.
/// </summary>
public sealed class EpssEnrichmentOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Epss:Enrichment";

    /// <summary>
    /// Whether the enrichment job is enabled. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Delay after EPSS ingestion before running enrichment. Default: 1 minute.
    /// Gives the ingest transaction time to commit before reads begin.
    /// </summary>
    public TimeSpan PostIngestDelay { get; set; } = TimeSpan.FromMinutes(1);

    /// <summary>
    /// Batch size for processing vulnerability instances. Default: 1000.
    /// </summary>
    public int BatchSize { get; set; } = 1000;

    /// <summary>
    /// High percentile threshold. Scores at or above this trigger CROSSED_HIGH. Default: 0.99.
    /// </summary>
    public double HighPercentile { get; set; } = 0.99;

    /// <summary>
    /// High score threshold. Scores at or above this trigger priority elevation. Default: 0.5.
    /// </summary>
    public double HighScore { get; set; } = 0.5;

    /// <summary>
    /// Big jump delta threshold. Score changes >= this trigger BIG_JUMP flag. Default: 0.10.
    /// </summary>
    public double BigJumpDelta { get; set; } = 0.10;

    /// <summary>
    /// Critical percentile threshold. Default: 0.995 (top 0.5%).
    /// </summary>
    public double CriticalPercentile { get; set; } = 0.995;

    /// <summary>
    /// Medium percentile threshold. Default: 0.90 (top 10%).
    /// </summary>
    public double MediumPercentile { get; set; } = 0.90;

    /// <summary>
    /// Process only CVEs with specific change flags. Empty = process all.
    /// Defaults to the union of new-scored, crossed-high, and big-jump flags.
    /// </summary>
    public EpssChangeFlags FlagsToProcess { get; set; } =
        EpssChangeFlags.NewScored |
        EpssChangeFlags.CrossedHigh |
        EpssChangeFlags.BigJumpUp |
        EpssChangeFlags.BigJumpDown;

    /// <summary>
    /// Suppress signals on model version change. Default: true.
    /// NOTE(review): not consumed by the job in this file — presumably read
    /// by the signal publisher; confirm.
    /// </summary>
    public bool SuppressSignalsOnModelChange { get; set; } = true;
}
|
||||
|
||||
/// <summary>
/// Background service that enriches vulnerability instances with current EPSS scores.
/// Runs after EPSS ingestion to update existing findings with new priority bands.
/// </summary>
public sealed class EpssEnrichmentJob : BackgroundService
{
    private readonly IEpssRepository _epssRepository;
    private readonly IEpssProvider _epssProvider;
    private readonly IEpssSignalPublisher _signalPublisher;
    private readonly IOptions<EpssEnrichmentOptions> _options;
    // NOTE(review): _timeProvider is injected but not yet consumed here —
    // presumably reserved for upcoming scheduling logic; confirm.
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EpssEnrichmentJob> _logger;
    private readonly ActivitySource _activitySource = new("StellaOps.Scanner.EpssEnrichment");

    // Event to trigger enrichment after ingestion. Each Release() permits one
    // enrichment pass; rapid triggers queue up rather than coalesce.
    private readonly SemaphoreSlim _enrichmentTrigger = new(0);

    public EpssEnrichmentJob(
        IEpssRepository epssRepository,
        IEpssProvider epssProvider,
        IEpssSignalPublisher signalPublisher,
        IOptions<EpssEnrichmentOptions> options,
        TimeProvider timeProvider,
        ILogger<EpssEnrichmentJob> logger)
    {
        _epssRepository = epssRepository ?? throw new ArgumentNullException(nameof(epssRepository));
        _epssProvider = epssProvider ?? throw new ArgumentNullException(nameof(epssProvider));
        _signalPublisher = signalPublisher ?? throw new ArgumentNullException(nameof(signalPublisher));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Main loop: waits for <see cref="TriggerEnrichment"/>, delays so the
    /// ingested data is fully committed, then runs one enrichment pass.
    /// Unexpected errors are logged and the loop continues.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("EPSS enrichment job started");

        var opts = _options.Value;

        if (!opts.Enabled)
        {
            _logger.LogInformation("EPSS enrichment job is disabled");
            return;
        }

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                // Wait for enrichment trigger or cancellation
                await _enrichmentTrigger.WaitAsync(stoppingToken);

                // Add delay after ingestion to ensure data is fully committed
                await Task.Delay(opts.PostIngestDelay, stoppingToken);

                await EnrichAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                // A failed pass must not kill the background service.
                _logger.LogError(ex, "EPSS enrichment job encountered an error");
            }
        }

        _logger.LogInformation("EPSS enrichment job stopped");
    }

    /// <summary>
    /// Triggers the enrichment process. Called after EPSS data is ingested.
    /// </summary>
    public void TriggerEnrichment()
    {
        _enrichmentTrigger.Release();
        _logger.LogDebug("EPSS enrichment triggered");
    }

    /// <summary>
    /// Runs the enrichment process. Updates vulnerability instances with current EPSS scores.
    /// </summary>
    /// <exception cref="Exception">Rethrows any failure after logging and tagging the activity.</exception>
    public async Task EnrichAsync(CancellationToken cancellationToken = default)
    {
        using var activity = _activitySource.StartActivity("epss.enrich", ActivityKind.Internal);
        var stopwatch = Stopwatch.StartNew();
        var opts = _options.Value;

        _logger.LogInformation("Starting EPSS enrichment");

        try
        {
            // Get the latest model date
            var modelDate = await _epssProvider.GetLatestModelDateAsync(cancellationToken);
            if (!modelDate.HasValue)
            {
                _logger.LogWarning("No EPSS data available for enrichment");
                return;
            }

            activity?.SetTag("epss.model_date", modelDate.Value.ToString("yyyy-MM-dd"));
            _logger.LogDebug("Using EPSS model date: {ModelDate}", modelDate.Value);

            // Get CVEs with changes that need processing
            var changedCves = await GetChangedCvesAsync(modelDate.Value, opts.FlagsToProcess, cancellationToken);

            if (changedCves.Count == 0)
            {
                _logger.LogDebug("No CVE changes to process");
                return;
            }

            _logger.LogInformation("Processing {Count} CVEs with EPSS changes", changedCves.Count);
            activity?.SetTag("epss.changed_cve_count", changedCves.Count);

            var totalUpdated = 0;
            var totalBandChanges = 0;

            // Process in batches
            foreach (var batch in changedCves.Chunk(opts.BatchSize))
            {
                var (updated, bandChanges) = await ProcessBatchAsync(
                    batch,
                    modelDate.Value,
                    cancellationToken);

                totalUpdated += updated;
                totalBandChanges += bandChanges;
            }

            stopwatch.Stop();

            _logger.LogInformation(
                "EPSS enrichment completed: updated={Updated}, bandChanges={BandChanges}, duration={Duration}ms",
                totalUpdated,
                totalBandChanges,
                stopwatch.ElapsedMilliseconds);

            activity?.SetTag("epss.updated_count", totalUpdated);
            activity?.SetTag("epss.band_change_count", totalBandChanges);
            activity?.SetTag("epss.duration_ms", stopwatch.ElapsedMilliseconds);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "EPSS enrichment failed");
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            throw;
        }
    }

    // Queries epss_changes for CVEs whose flags match for the model date (Task #4).
    private async Task<IReadOnlyList<EpssChangeRecord>> GetChangedCvesAsync(
        DateOnly modelDate,
        EpssChangeFlags flags,
        CancellationToken cancellationToken)
    {
        _logger.LogDebug("Querying EPSS changes for model date {ModelDate} with flags {Flags}", modelDate, flags);

        var changes = await _epssRepository.GetChangesAsync(modelDate, flags, cancellationToken: cancellationToken);

        _logger.LogDebug("Found {Count} EPSS changes matching flags {Flags}", changes.Count, flags);

        return changes;
    }

    // Processes one batch of changes: recomputes bands and emits events when
    // a band changed. modelDate is currently unused but kept for signature
    // stability with callers and future per-date processing.
    private async Task<(int Updated, int BandChanges)> ProcessBatchAsync(
        EpssChangeRecord[] batch,
        DateOnly modelDate,
        CancellationToken cancellationToken)
    {
        var opts = _options.Value;
        var updated = 0;
        var bandChanges = 0;

        // Get current EPSS scores for all CVEs in batch
        var cveIds = batch.Select(c => c.CveId).ToList();
        var epssResult = await _epssProvider.GetCurrentBatchAsync(cveIds, cancellationToken);

        // Index evidence by CVE id once, instead of scanning Found for every
        // change (the previous FirstOrDefault made this O(changes × found)).
        // TryAdd keeps the first occurrence, matching FirstOrDefault semantics.
        var evidenceByCve = new Dictionary<string, EpssEvidence>(StringComparer.OrdinalIgnoreCase);
        foreach (var found in epssResult.Found)
        {
            evidenceByCve.TryAdd(found.CveId, found);
        }

        foreach (var change in batch)
        {
            if (!evidenceByCve.TryGetValue(change.CveId, out var evidence))
            {
                continue;
            }

            var previousBand = change.PreviousBand;
            var newBand = ComputePriorityBand(evidence.Percentile, opts);

            // Check if band changed
            if (previousBand != newBand)
            {
                bandChanges++;

                // Emit vuln.priority.changed event
                await EmitPriorityChangedEventAsync(
                    change.CveId,
                    previousBand,
                    newBand,
                    evidence,
                    cancellationToken);
            }

            updated++;
        }

        return (updated, bandChanges);
    }

    // Maps a percentile onto a priority band using inclusive (>=) thresholds,
    // checked from most to least severe.
    private static EpssPriorityBand ComputePriorityBand(double percentile, EpssEnrichmentOptions opts)
    {
        if (percentile >= opts.CriticalPercentile)
        {
            return EpssPriorityBand.Critical;
        }

        if (percentile >= opts.HighPercentile)
        {
            return EpssPriorityBand.High;
        }

        if (percentile >= opts.MediumPercentile)
        {
            return EpssPriorityBand.Medium;
        }

        return EpssPriorityBand.Low;
    }

    // BUGFIX: this method awaits the signal publisher, so it must be declared
    // 'async'; the original 'private Task' with an 'await' in the body does
    // not compile (CS4032/CS4033).
    private async Task EmitPriorityChangedEventAsync(
        string cveId,
        EpssPriorityBand previousBand,
        EpssPriorityBand newBand,
        EpssEvidence evidence,
        CancellationToken cancellationToken)
    {
        // Task #6: Emit `vuln.priority.changed` event via signal publisher
        _logger.LogDebug(
            "Priority changed: {CveId} {PreviousBand} -> {NewBand} (score={Score:F4}, percentile={Percentile:F4})",
            cveId,
            previousBand,
            newBand,
            evidence.Score,
            evidence.Percentile);

        // Publish priority changed event (Task #6)
        var result = await _signalPublisher.PublishPriorityChangedAsync(
            Guid.Empty, // Tenant ID would come from context
            cveId,
            previousBand.ToString(),
            newBand.ToString(),
            evidence.Score,
            evidence.ModelDate,
            cancellationToken);

        if (!result.Success)
        {
            // Publish failure is logged, not thrown: enrichment counters must
            // still advance for the remaining CVEs in the batch.
            _logger.LogWarning(
                "Failed to publish priority changed event for {CveId}: {Error}",
                cveId,
                result.Error);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Record representing an EPSS change that needs processing.
/// Produced by the repository's change query and consumed by
/// <c>EpssEnrichmentJob</c> batch processing.
/// </summary>
public sealed record EpssChangeRecord
{
    /// <summary>
    /// CVE identifier.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Change flags indicating what changed (bitwise-combinable).
    /// </summary>
    public EpssChangeFlags Flags { get; init; }

    /// <summary>
    /// Previous EPSS score (if available).
    /// </summary>
    public double? PreviousScore { get; init; }

    /// <summary>
    /// New EPSS score.
    /// </summary>
    public double NewScore { get; init; }

    /// <summary>
    /// Previous priority band (if available).
    /// </summary>
    public EpssPriorityBand PreviousBand { get; init; }

    /// <summary>
    /// Model date for this change.
    /// </summary>
    public DateOnly ModelDate { get; init; }
}
|
||||
@@ -0,0 +1,205 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssEnrichmentStageExecutor.cs
|
||||
// Sprint: SPRINT_3410_0002_0001_epss_scanner_integration
|
||||
// Task: EPSS-SCAN-006
|
||||
// Description: Scan stage executor that enriches findings with EPSS scores.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Scan stage executor that enriches vulnerability findings with EPSS scores.
/// Attaches immutable EPSS evidence to each CVE at scan time.
/// </summary>
public sealed class EpssEnrichmentStageExecutor : IScanStageExecutor
{
    private readonly IEpssProvider _epssProvider;
    private readonly ILogger<EpssEnrichmentStageExecutor> _logger;

    public EpssEnrichmentStageExecutor(
        IEpssProvider epssProvider,
        ILogger<EpssEnrichmentStageExecutor> logger)
    {
        _epssProvider = epssProvider ?? throw new ArgumentNullException(nameof(epssProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Stage name used by the scan pipeline to identify this executor.</summary>
    public string StageName => ScanStageNames.EpssEnrichment;

    /// <summary>
    /// Collects CVE ids from the job's analyzer results, fetches their EPSS
    /// scores in one batch, and stores evidence/model-date/not-found lists in
    /// the analysis context. Skips silently when EPSS data is unavailable or
    /// no CVEs are present.
    /// </summary>
    public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        // Check if EPSS data is available
        var isAvailable = await _epssProvider.IsAvailableAsync(cancellationToken).ConfigureAwait(false);
        if (!isAvailable)
        {
            _logger.LogWarning("EPSS data not available; skipping EPSS enrichment for job {JobId}", context.JobId);
            return;
        }

        // Get CVE IDs from findings
        var cveIds = ExtractCveIds(context);
        if (cveIds.Count == 0)
        {
            _logger.LogDebug("No CVE IDs found in findings for job {JobId}; skipping EPSS enrichment", context.JobId);
            return;
        }

        _logger.LogInformation(
            "Enriching {CveCount} CVEs with EPSS scores for job {JobId}",
            cveIds.Count,
            context.JobId);

        // Fetch EPSS scores in batch
        var epssResult = await _epssProvider.GetCurrentBatchAsync(cveIds, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "EPSS lookup: found={Found}, notFound={NotFound}, timeMs={TimeMs}, fromCache={FromCache}",
            epssResult.Found.Count,
            epssResult.NotFound.Count,
            epssResult.LookupTimeMs,
            epssResult.PartiallyFromCache);

        // Store EPSS evidence in analysis context.
        // NOTE(review): ToDictionary throws on duplicate CVE ids — presumably
        // Found is de-duplicated because the input set is a HashSet; confirm
        // the provider contract.
        var epssMap = epssResult.Found.ToDictionary(
            e => e.CveId,
            e => e,
            StringComparer.OrdinalIgnoreCase);

        context.Analysis.Set(ScanAnalysisKeys.EpssEvidence, epssMap);
        context.Analysis.Set(ScanAnalysisKeys.EpssModelDate, epssResult.ModelDate);
        context.Analysis.Set(ScanAnalysisKeys.EpssNotFoundCves, epssResult.NotFound.ToList());

        _logger.LogInformation(
            "EPSS enrichment completed for job {JobId}: {Found}/{Total} CVEs enriched, model date {ModelDate}",
            context.JobId,
            epssMap.Count,
            cveIds.Count,
            epssResult.ModelDate);
    }

    // Gathers a case-insensitive, de-duplicated set of CVE ids from all
    // analyzer result collections present in the analysis context.
    private static HashSet<string> ExtractCveIds(ScanJobContext context)
    {
        var cveIds = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Extract from OS package analyzer results
        if (context.Analysis.TryGet<Dictionary<string, object>>(ScanAnalysisKeys.OsPackageAnalyzers, out var osResults) && osResults is not null)
        {
            foreach (var analyzerResult in osResults.Values)
            {
                ExtractCvesFromAnalyzerResult(analyzerResult, cveIds);
            }
        }

        // Extract from language analyzer results
        if (context.Analysis.TryGet<Dictionary<string, object>>(ScanAnalysisKeys.LanguagePackageAnalyzers, out var langResults) && langResults is not null)
        {
            foreach (var analyzerResult in langResults.Values)
            {
                ExtractCvesFromAnalyzerResult(analyzerResult, cveIds);
            }
        }

        // Extract from consolidated findings if available
        if (context.Analysis.TryGet<IEnumerable<object>>(ScanAnalysisKeys.ConsolidatedFindings, out var findings) && findings is not null)
        {
            foreach (var finding in findings)
            {
                ExtractCvesFromFinding(finding, cveIds);
            }
        }

        return cveIds;
    }

    private static void ExtractCvesFromAnalyzerResult(object analyzerResult, HashSet<string> cveIds)
    {
        // Use reflection to extract CVE IDs from various analyzer result types
        // This handles OSPackageAnalyzerResult, LanguagePackageAnalyzerResult, etc.
        var resultType = analyzerResult.GetType();

        // Try to get Vulnerabilities property
        var vulnsProperty = resultType.GetProperty("Vulnerabilities");
        if (vulnsProperty?.GetValue(analyzerResult) is IEnumerable<object> vulns)
        {
            foreach (var vuln in vulns)
            {
                ExtractCvesFromFinding(vuln, cveIds);
            }
        }

        // Try to get Findings property
        var findingsProperty = resultType.GetProperty("Findings");
        if (findingsProperty?.GetValue(analyzerResult) is IEnumerable<object> findingsList)
        {
            foreach (var finding in findingsList)
            {
                ExtractCvesFromFinding(finding, cveIds);
            }
        }
    }

    // Probes one finding object for CVE ids via reflection, trying property
    // names in priority order and returning after the first hit for the
    // single-valued properties.
    private static void ExtractCvesFromFinding(object finding, HashSet<string> cveIds)
    {
        var findingType = finding.GetType();

        // Try CveId property
        var cveIdProperty = findingType.GetProperty("CveId");
        if (cveIdProperty?.GetValue(finding) is string cveId && !string.IsNullOrWhiteSpace(cveId))
        {
            cveIds.Add(cveId);
            return;
        }

        // Try VulnerabilityId property (some findings use this)
        var vulnIdProperty = findingType.GetProperty("VulnerabilityId");
        if (vulnIdProperty?.GetValue(finding) is string vulnId &&
            !string.IsNullOrWhiteSpace(vulnId) &&
            vulnId.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
        {
            cveIds.Add(vulnId);
            return;
        }

        // Try Identifiers collection; collects every CVE-prefixed entry.
        var identifiersProperty = findingType.GetProperty("Identifiers");
        if (identifiersProperty?.GetValue(finding) is IEnumerable<object> identifiers)
        {
            foreach (var identifier in identifiers)
            {
                var idValue = identifier.ToString();
                if (!string.IsNullOrWhiteSpace(idValue) &&
                    idValue.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
                {
                    cveIds.Add(idValue);
                }
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Well-known keys for EPSS-related analysis data.
/// Declared partial: additional ScanAnalysisKeys constants live elsewhere in the project.
/// </summary>
public static partial class ScanAnalysisKeys
{
    /// <summary>
    /// Dictionary of CVE ID to EpssEvidence for enriched findings.
    /// </summary>
    public const string EpssEvidence = "epss.evidence";

    /// <summary>
    /// The EPSS model date used for enrichment.
    /// </summary>
    public const string EpssModelDate = "epss.model_date";

    /// <summary>
    /// List of CVE IDs that were not found in EPSS data.
    /// </summary>
    public const string EpssNotFoundCves = "epss.not_found";
}
|
||||
@@ -67,6 +67,7 @@ public sealed class EpssIngestOptions
|
||||
public sealed class EpssIngestJob : BackgroundService
|
||||
{
|
||||
private readonly IEpssRepository _repository;
|
||||
private readonly IEpssRawRepository? _rawRepository;
|
||||
private readonly EpssOnlineSource _onlineSource;
|
||||
private readonly EpssBundleSource _bundleSource;
|
||||
private readonly EpssCsvStreamParser _parser;
|
||||
@@ -82,9 +83,11 @@ public sealed class EpssIngestJob : BackgroundService
|
||||
EpssCsvStreamParser parser,
|
||||
IOptions<EpssIngestOptions> options,
|
||||
TimeProvider timeProvider,
|
||||
ILogger<EpssIngestJob> logger)
|
||||
ILogger<EpssIngestJob> logger,
|
||||
IEpssRawRepository? rawRepository = null)
|
||||
{
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
_rawRepository = rawRepository; // Optional - raw storage for replay capability
|
||||
_onlineSource = onlineSource ?? throw new ArgumentNullException(nameof(onlineSource));
|
||||
_bundleSource = bundleSource ?? throw new ArgumentNullException(nameof(bundleSource));
|
||||
_parser = parser ?? throw new ArgumentNullException(nameof(parser));
|
||||
@@ -186,6 +189,18 @@ public sealed class EpssIngestJob : BackgroundService
|
||||
session,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Store raw payload for replay capability (Sprint: SPRINT_3413_0001_0001, Task: R2)
|
||||
if (_rawRepository is not null)
|
||||
{
|
||||
await StoreRawPayloadAsync(
|
||||
importRun.ImportRunId,
|
||||
sourceFile.SourceUri,
|
||||
modelDate,
|
||||
session,
|
||||
fileContent.Length,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Mark success
|
||||
await _repository.MarkImportSucceededAsync(
|
||||
importRun.ImportRunId,
|
||||
@@ -279,4 +294,69 @@ public sealed class EpssIngestJob : BackgroundService
|
||||
var hash = System.Security.Cryptography.SHA256.HashData(content);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Stores raw EPSS payload for deterministic replay capability.
|
||||
/// Sprint: SPRINT_3413_0001_0001, Task: R2
|
||||
/// </summary>
|
||||
private async Task StoreRawPayloadAsync(
    Guid importRunId,
    string sourceUri,
    DateOnly modelDate,
    EpssParsedSession session,
    long compressedSize,
    CancellationToken cancellationToken)
{
    if (_rawRepository is null)
    {
        return; // raw storage is optional; nothing to do without a repository
    }

    try
    {
        // Project the parsed rows into a compact JSON array so the exact
        // ingested data can be replayed deterministically later.
        var projected = session.Rows.Select(row => new
        {
            cve = row.CveId,
            epss = row.Score,
            percentile = row.Percentile
        });

        var json = System.Text.Json.JsonSerializer.Serialize(
            projected,
            new System.Text.Json.JsonSerializerOptions { WriteIndented = false });

        var jsonBytes = System.Text.Encoding.UTF8.GetBytes(json);
        var digest = System.Security.Cryptography.SHA256.HashData(jsonBytes);

        var record = new EpssRaw
        {
            SourceUri = sourceUri,
            AsOfDate = modelDate,
            Payload = json,
            PayloadSha256 = digest,
            HeaderComment = session.HeaderComment,
            ModelVersion = session.ModelVersionTag,
            PublishedDate = session.PublishedDate,
            RowCount = session.RowCount,
            CompressedSize = compressedSize,
            DecompressedSize = jsonBytes.LongLength,
            ImportRunId = importRunId
        };

        await _rawRepository.CreateAsync(record, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "Stored raw EPSS payload: modelDate={ModelDate}, rows={RowCount}, size={Size}",
            modelDate,
            session.RowCount,
            jsonBytes.Length);
    }
    catch (Exception ex)
    {
        // Raw storage is best-effort; never fail the ingestion run over it.
        _logger.LogWarning(
            ex,
            "Failed to store raw EPSS payload for {ModelDate}; ingestion will continue",
            modelDate);
    }
}
|
||||
}
|
||||
|
||||
505
src/Scanner/StellaOps.Scanner.Worker/Processing/EpssSignalJob.cs
Normal file
505
src/Scanner/StellaOps.Scanner.Worker/Processing/EpssSignalJob.cs
Normal file
@@ -0,0 +1,505 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssSignalJob.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Tasks: S5-S10 - Signal generation service
|
||||
// Description: Background job that generates tenant-scoped EPSS signals.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing;
|
||||
|
||||
/// <summary>
/// Options for the EPSS signal generation job.
/// Bound from the "Epss:Signal" configuration section (see <see cref="SectionName"/>).
/// </summary>
public sealed class EpssSignalOptions
{
    /// <summary>
    /// Configuration section name ("Epss:Signal").
    /// </summary>
    public const string SectionName = "Epss:Signal";

    /// <summary>
    /// Whether the signal job is enabled. Default: true.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Delay applied after enrichment before generating signals, giving
    /// enriched data time to settle. Default: 30 seconds.
    /// </summary>
    public TimeSpan PostEnrichmentDelay { get; set; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Batch size (number of changes per chunk) for signal generation. Default: 500.
    /// </summary>
    public int BatchSize { get; set; } = 500;

    /// <summary>
    /// Signal retention in days; older signals are pruned after each run. Default: 90.
    /// </summary>
    public int RetentionDays { get; set; } = 90;
}
|
||||
|
||||
/// <summary>
/// EPSS signal event types emitted by the signal generation job.
/// </summary>
public static class EpssSignalEventTypes
{
    /// <summary>
    /// Significant score increase (delta >= threshold).
    /// </summary>
    public const string RiskSpike = "RISK_SPIKE";

    /// <summary>
    /// Priority band change (e.g. MEDIUM -> HIGH).
    /// </summary>
    public const string BandChange = "BAND_CHANGE";

    /// <summary>
    /// New CVE scored for the first time.
    /// </summary>
    public const string NewHigh = "NEW_HIGH";

    /// <summary>
    /// CVE dropped from HIGH/CRITICAL to LOW.
    /// </summary>
    public const string DroppedLow = "DROPPED_LOW";

    /// <summary>
    /// EPSS model version changed (per-tenant summary event).
    /// </summary>
    public const string ModelUpdated = "MODEL_UPDATED";
}
|
||||
|
||||
/// <summary>
/// Background service that generates tenant-scoped EPSS signals.
/// Only generates signals for CVEs that are observed in a tenant's inventory.
/// Triggered via <see cref="TriggerSignalGeneration"/> after each enrichment pass.
/// </summary>
public sealed class EpssSignalJob : BackgroundService
{
    private readonly IEpssRepository _epssRepository;
    private readonly IEpssSignalRepository _signalRepository;
    private readonly IObservedCveRepository _observedCveRepository;
    private readonly IEpssSignalPublisher _signalPublisher;
    private readonly IEpssProvider _epssProvider;
    private readonly IOptions<EpssSignalOptions> _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EpssSignalJob> _logger;
    private readonly ActivitySource _activitySource = new("StellaOps.Scanner.EpssSignal");

    // Released by TriggerSignalGeneration; ExecuteAsync waits on it between runs.
    private readonly SemaphoreSlim _signalTrigger = new(0);

    // Last processed model version; used to detect model roll-overs between runs.
    private string? _lastModelVersion;

    public EpssSignalJob(
        IEpssRepository epssRepository,
        IEpssSignalRepository signalRepository,
        IObservedCveRepository observedCveRepository,
        IEpssSignalPublisher signalPublisher,
        IEpssProvider epssProvider,
        IOptions<EpssSignalOptions> options,
        TimeProvider timeProvider,
        ILogger<EpssSignalJob> logger)
    {
        _epssRepository = epssRepository ?? throw new ArgumentNullException(nameof(epssRepository));
        _signalRepository = signalRepository ?? throw new ArgumentNullException(nameof(signalRepository));
        _observedCveRepository = observedCveRepository ?? throw new ArgumentNullException(nameof(observedCveRepository));
        _signalPublisher = signalPublisher ?? throw new ArgumentNullException(nameof(signalPublisher));
        _epssProvider = epssProvider ?? throw new ArgumentNullException(nameof(epssProvider));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Main loop: waits for a trigger, delays to let enriched data settle,
    /// generates signals, then prunes old signals.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("EPSS signal job started");

        var opts = _options.Value;

        if (!opts.Enabled)
        {
            _logger.LogInformation("EPSS signal job is disabled");
            return;
        }

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                // Wait for signal trigger or cancellation
                await _signalTrigger.WaitAsync(stoppingToken);

                // Add delay after enrichment to ensure data consistency
                await Task.Delay(opts.PostEnrichmentDelay, stoppingToken);

                await GenerateSignalsAsync(stoppingToken);

                // Periodic pruning of old signals
                await _signalRepository.PruneAsync(opts.RetentionDays, stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                // Keep the loop alive on transient failures; next trigger retries.
                _logger.LogError(ex, "EPSS signal job encountered an error");
            }
        }

        _logger.LogInformation("EPSS signal job stopped");
    }

    /// <summary>
    /// Triggers signal generation. Called after EPSS enrichment completes.
    /// </summary>
    public void TriggerSignalGeneration()
    {
        _signalTrigger.Release();
        _logger.LogDebug("EPSS signal generation triggered");
    }

    /// <summary>
    /// Generates signals for all tenants based on EPSS changes.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    public async Task GenerateSignalsAsync(CancellationToken cancellationToken = default)
    {
        using var activity = _activitySource.StartActivity("epss.signal.generate", ActivityKind.Internal);
        var stopwatch = Stopwatch.StartNew();
        var opts = _options.Value;

        _logger.LogInformation("Starting EPSS signal generation");

        try
        {
            // Get current model date
            var modelDate = await _epssProvider.GetLatestModelDateAsync(cancellationToken);
            if (!modelDate.HasValue)
            {
                _logger.LogWarning("No EPSS data available for signal generation");
                return;
            }

            activity?.SetTag("epss.model_date", modelDate.Value.ToString("yyyy-MM-dd"));

            // Check for model version change (S7).
            // FIX: capture the previous version BEFORE overwriting _lastModelVersion,
            // otherwise the MODEL_UPDATED signal reports the new version as both
            // old and new.
            var currentModelVersion = await GetCurrentModelVersionAsync(modelDate.Value, cancellationToken);
            var previousModelVersion = _lastModelVersion;
            var isModelChange = previousModelVersion is not null &&
                                !string.Equals(previousModelVersion, currentModelVersion, StringComparison.Ordinal);

            if (isModelChange)
            {
                _logger.LogInformation(
                    "EPSS model version changed: {OldVersion} -> {NewVersion}",
                    previousModelVersion,
                    currentModelVersion);
            }

            _lastModelVersion = currentModelVersion;

            // Get changes from epss_changes table
            var changes = await GetEpssChangesAsync(modelDate.Value, cancellationToken);
            if (changes.Count == 0)
            {
                _logger.LogDebug("No EPSS changes to process for signals");
                return;
            }

            _logger.LogInformation("Processing {Count} EPSS changes for signal generation", changes.Count);
            activity?.SetTag("epss.change_count", changes.Count);

            var totalSignals = 0;
            var filteredCount = 0;

            // Get all active tenants (S6)
            var activeTenants = await _observedCveRepository.GetActiveTenantsAsync(cancellationToken);

            if (activeTenants.Count == 0)
            {
                _logger.LogDebug("No active tenants found; using default tenant");
                activeTenants = new[] { Guid.Empty };
            }

            // The set of changed CVE IDs does not vary per tenant; compute it once.
            var changeCveIds = changes.Select(c => c.CveId).Distinct().ToList();

            // For each tenant, filter changes to only observed CVEs
            foreach (var tenantId in activeTenants)
            {
                // Filter to only observed CVEs for this tenant (S6)
                var observedCves = await _observedCveRepository.FilterObservedAsync(
                    tenantId,
                    changeCveIds,
                    cancellationToken);

                var tenantChanges = changes
                    .Where(c => observedCves.Contains(c.CveId))
                    .ToArray();

                if (tenantChanges.Length == 0)
                {
                    continue;
                }

                // FIX: 'changes' is an IReadOnlyList, so use Count (Length does not compile).
                filteredCount += changes.Count - tenantChanges.Length;

                foreach (var batch in tenantChanges.Chunk(opts.BatchSize))
                {
                    var signals = GenerateSignalsForBatch(
                        batch,
                        tenantId,
                        modelDate.Value,
                        currentModelVersion,
                        isModelChange);

                    if (signals.Count > 0)
                    {
                        // Store signals in database
                        var created = await _signalRepository.CreateBulkAsync(signals, cancellationToken);
                        totalSignals += created;

                        // Publish signals to notification system (S9)
                        var published = await _signalPublisher.PublishBatchAsync(signals, cancellationToken);
                        _logger.LogDebug(
                            "Published {Published}/{Total} EPSS signals for tenant {TenantId}",
                            published,
                            signals.Count,
                            tenantId);
                    }
                }

                // If model changed, emit summary signal per tenant (S8)
                if (isModelChange)
                {
                    await EmitModelUpdatedSignalAsync(
                        tenantId,
                        modelDate.Value,
                        previousModelVersion!,
                        currentModelVersion!,
                        tenantChanges.Length,
                        cancellationToken);
                    totalSignals++;
                }
            }

            stopwatch.Stop();

            _logger.LogInformation(
                "EPSS signal generation completed: signals={SignalCount}, changes={ChangeCount}, filtered={FilteredCount}, tenants={TenantCount}, duration={Duration}ms",
                totalSignals,
                changes.Count,
                filteredCount,
                activeTenants.Count,
                stopwatch.ElapsedMilliseconds);

            activity?.SetTag("epss.signal_count", totalSignals);
            activity?.SetTag("epss.filtered_count", filteredCount);
            activity?.SetTag("epss.tenant_count", activeTenants.Count);
            activity?.SetTag("epss.duration_ms", stopwatch.ElapsedMilliseconds);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "EPSS signal generation failed");
            activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
            throw;
        }
    }

    /// <summary>
    /// Builds signal records for one batch of changes for a single tenant.
    /// </summary>
    private IReadOnlyList<EpssSignal> GenerateSignalsForBatch(
        EpssChangeRecord[] changes,
        Guid tenantId,
        DateOnly modelDate,
        string? modelVersion,
        bool isModelChange)
    {
        var signals = new List<EpssSignal>();

        foreach (var change in changes)
        {
            // Skip generating individual signals on model change day if suppression is enabled
            // (would check tenant config in production)
            if (isModelChange && ShouldSuppressOnModelChange(change))
            {
                continue;
            }

            var eventType = DetermineEventType(change);
            if (string.IsNullOrEmpty(eventType))
            {
                continue;
            }

            // ComputeNewBand is pure; compute once instead of per call site.
            var newBand = ComputeNewBand(change).ToString();
            var oldBand = change.PreviousBand.ToString();

            var dedupeKey = EpssExplainHashCalculator.ComputeDedupeKey(
                modelDate,
                change.CveId,
                eventType,
                oldBand,
                newBand);

            var explainHash = EpssExplainHashCalculator.ComputeExplainHash(
                modelDate,
                change.CveId,
                eventType,
                oldBand,
                newBand,
                change.NewScore,
                0, // Percentile would come from EPSS data
                modelVersion);

            var payload = JsonSerializer.Serialize(new
            {
                cveId = change.CveId,
                oldScore = change.PreviousScore,
                newScore = change.NewScore,
                oldBand,
                newBand,
                flags = change.Flags.ToString(),
                modelVersion
            });

            signals.Add(new EpssSignal
            {
                TenantId = tenantId,
                ModelDate = modelDate,
                CveId = change.CveId,
                EventType = eventType,
                RiskBand = newBand,
                EpssScore = change.NewScore,
                EpssDelta = change.NewScore - (change.PreviousScore ?? 0),
                IsModelChange = isModelChange,
                ModelVersion = modelVersion,
                DedupeKey = dedupeKey,
                ExplainHash = explainHash,
                Payload = payload
            });
        }

        return signals;
    }

    /// <summary>
    /// Maps change flags to a signal event type; null means "no signal".
    /// Precedence: NewScored > CrossedHigh > BigJumpUp > DroppedLow.
    /// </summary>
    private static string? DetermineEventType(EpssChangeRecord change)
    {
        if (change.Flags.HasFlag(EpssChangeFlags.NewScored))
        {
            return EpssSignalEventTypes.NewHigh;
        }

        if (change.Flags.HasFlag(EpssChangeFlags.CrossedHigh))
        {
            return EpssSignalEventTypes.BandChange;
        }

        if (change.Flags.HasFlag(EpssChangeFlags.BigJumpUp))
        {
            return EpssSignalEventTypes.RiskSpike;
        }

        if (change.Flags.HasFlag(EpssChangeFlags.DroppedLow))
        {
            return EpssSignalEventTypes.DroppedLow;
        }

        return null;
    }

    /// <summary>
    /// Maps the new score onto a priority band.
    /// Simplified thresholds - would use EpssPriorityCalculator in production.
    /// </summary>
    private static EpssPriorityBand ComputeNewBand(EpssChangeRecord change)
    {
        if (change.NewScore >= 0.5)
        {
            return EpssPriorityBand.Critical;
        }

        if (change.NewScore >= 0.2)
        {
            return EpssPriorityBand.High;
        }

        if (change.NewScore >= 0.05)
        {
            return EpssPriorityBand.Medium;
        }

        return EpssPriorityBand.Low;
    }

    /// <summary>
    /// Suppress RISK_SPIKE and BAND_CHANGE on model change days to avoid alert storms.
    /// </summary>
    private static bool ShouldSuppressOnModelChange(EpssChangeRecord change)
    {
        return change.Flags.HasFlag(EpssChangeFlags.BigJumpUp) ||
               change.Flags.HasFlag(EpssChangeFlags.BigJumpDown) ||
               change.Flags.HasFlag(EpssChangeFlags.CrossedHigh);
    }

    /// <summary>
    /// Resolves the model version for a model date.
    /// Placeholder derived from the date; would query epss_import_run/epss_raw in production.
    /// (Non-async: the original 'async' method had no awaits, triggering CS1998.)
    /// </summary>
    private Task<string?> GetCurrentModelVersionAsync(DateOnly modelDate, CancellationToken cancellationToken)
    {
        return Task.FromResult<string?>($"v{modelDate:yyyy.MM.dd}");
    }

    /// <summary>
    /// Loads score changes for the model date.
    /// TODO: implement repository method to read from the epss_changes table; empty for now.
    /// (Non-async: the original 'async' method had no awaits, triggering CS1998.)
    /// </summary>
    private Task<IReadOnlyList<EpssChangeRecord>> GetEpssChangesAsync(
        DateOnly modelDate,
        CancellationToken cancellationToken)
    {
        return Task.FromResult<IReadOnlyList<EpssChangeRecord>>(Array.Empty<EpssChangeRecord>());
    }

    /// <summary>
    /// Emits a per-tenant MODEL_UPDATED summary signal when the EPSS model version rolls over.
    /// </summary>
    private async Task EmitModelUpdatedSignalAsync(
        Guid tenantId,
        DateOnly modelDate,
        string oldVersion,
        string newVersion,
        int affectedCveCount,
        CancellationToken cancellationToken)
    {
        var payload = JsonSerializer.Serialize(new
        {
            oldVersion,
            newVersion,
            affectedCveCount,
            suppressedSignals = true
        });

        var signal = new EpssSignal
        {
            TenantId = tenantId,
            ModelDate = modelDate,
            CveId = "MODEL_UPDATE",
            EventType = EpssSignalEventTypes.ModelUpdated,
            IsModelChange = true,
            ModelVersion = newVersion,
            DedupeKey = $"{modelDate:yyyy-MM-dd}:MODEL_UPDATE:{oldVersion}->{newVersion}",
            ExplainHash = EpssExplainHashCalculator.ComputeExplainHash(
                modelDate,
                "MODEL_UPDATE",
                EpssSignalEventTypes.ModelUpdated,
                oldVersion,
                newVersion,
                0,
                0,
                newVersion),
            Payload = payload
        };

        await _signalRepository.CreateAsync(signal, cancellationToken);

        _logger.LogInformation(
            "Emitted MODEL_UPDATED signal: {OldVersion} -> {NewVersion}, affected {Count} CVEs",
            oldVersion,
            newVersion,
            affectedCveCount);
    }
}
|
||||
@@ -3,11 +3,13 @@
|
||||
// Sprint: SPRINT_3500_0014_0001_native_analyzer_integration
|
||||
// Task: NAI-001
|
||||
// Description: Executes native binary analysis during container scans.
|
||||
// Note: NUC-004 (unknown classification) deferred - requires project reference.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Emit.Native;
|
||||
using StellaOps.Scanner.Worker.Diagnostics;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
@@ -281,4 +283,7 @@ public sealed record NativeAnalysisResult
|
||||
|
||||
/// <summary>Emitted component results.</summary>
|
||||
public IReadOnlyList<NativeComponentEmitResult> Components { get; init; } = Array.Empty<NativeComponentEmitResult>();
|
||||
|
||||
/// <summary>Layer component fragments for SBOM merging.</summary>
|
||||
public IReadOnlyList<LayerComponentFragment> LayerFragments { get; init; } = Array.Empty<LayerComponentFragment>();
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ public static class ScanStageNames
|
||||
public const string PullLayers = "pull-layers";
|
||||
public const string BuildFilesystem = "build-filesystem";
|
||||
public const string ExecuteAnalyzers = "execute-analyzers";
|
||||
public const string EpssEnrichment = "epss-enrichment";
|
||||
public const string ComposeArtifacts = "compose-artifacts";
|
||||
public const string EmitReports = "emit-reports";
|
||||
public const string Entropy = "entropy";
|
||||
@@ -20,8 +21,10 @@ public static class ScanStageNames
|
||||
PullLayers,
|
||||
BuildFilesystem,
|
||||
ExecuteAnalyzers,
|
||||
EpssEnrichment,
|
||||
ComposeArtifacts,
|
||||
Entropy,
|
||||
EmitReports,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -133,6 +133,7 @@ builder.Services.AddSingleton<ILanguageAnalyzerPluginCatalog, LanguageAnalyzerPl
|
||||
builder.Services.AddSingleton<IScanAnalyzerDispatcher, CompositeScanAnalyzerDispatcher>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, RegistrySecretStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, EpssEnrichmentStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityBuildStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, Reachability.ReachabilityPublishStageExecutor>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, EntropyStageExecutor>();
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AttestingRichGraphWriter.cs
|
||||
// Sprint: SPRINT_3620_0001_0001_reachability_witness_dsse
|
||||
// Description: RichGraphWriter wrapper that produces DSSE attestation alongside graph.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
/// Result of writing a rich graph with attestation.
/// </summary>
/// <remarks>
/// <see cref="AttestationPath"/> and <see cref="WitnessResult"/> are null when
/// attestation is disabled; <see cref="AttestationPath"/> is also null when the
/// publisher returned an empty DSSE envelope.
/// </remarks>
/// <param name="GraphPath">Path to the richgraph-v1.json file.</param>
/// <param name="MetaPath">Path to the meta.json file.</param>
/// <param name="GraphHash">Content-addressed hash of the graph.</param>
/// <param name="NodeCount">Number of nodes in the graph.</param>
/// <param name="EdgeCount">Number of edges in the graph.</param>
/// <param name="AttestationPath">Path to the attestation DSSE envelope (if produced).</param>
/// <param name="WitnessResult">Detailed witness publication result (if attestation enabled).</param>
public sealed record AttestingRichGraphWriteResult(
    string GraphPath,
    string MetaPath,
    string GraphHash,
    int NodeCount,
    int EdgeCount,
    string? AttestationPath,
    ReachabilityWitnessPublishResult? WitnessResult);
|
||||
|
||||
/// <summary>
/// Writes richgraph-v1 documents with optional DSSE attestation.
/// Wraps <see cref="RichGraphWriter"/> and integrates with <see cref="IReachabilityWitnessPublisher"/>.
/// </summary>
public sealed class AttestingRichGraphWriter
{
    private readonly RichGraphWriter _graphWriter;
    private readonly IReachabilityWitnessPublisher _witnessPublisher;
    private readonly ReachabilityWitnessOptions _options;
    private readonly ILogger<AttestingRichGraphWriter> _logger;

    /// <summary>
    /// Creates a new attesting rich graph writer.
    /// </summary>
    public AttestingRichGraphWriter(
        RichGraphWriter graphWriter,
        IReachabilityWitnessPublisher witnessPublisher,
        IOptions<ReachabilityWitnessOptions> options,
        ILogger<AttestingRichGraphWriter> logger)
    {
        _graphWriter = graphWriter ?? throw new ArgumentNullException(nameof(graphWriter));
        _witnessPublisher = witnessPublisher ?? throw new ArgumentNullException(nameof(witnessPublisher));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Writes the rich graph and produces attestation if enabled.
    /// </summary>
    /// <param name="graph">The rich graph to write.</param>
    /// <param name="outputRoot">Root output directory.</param>
    /// <param name="analysisId">Analysis identifier.</param>
    /// <param name="subjectDigest">Subject artifact digest for attestation.</param>
    /// <param name="policyHash">Optional policy hash for attestation.</param>
    /// <param name="sourceCommit">Optional source commit for attestation.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Write result including attestation details.</returns>
    public async Task<AttestingRichGraphWriteResult> WriteWithAttestationAsync(
        RichGraph graph,
        string outputRoot,
        string analysisId,
        string subjectDigest,
        string? policyHash = null,
        string? sourceCommit = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputRoot);
        ArgumentException.ThrowIfNullOrWhiteSpace(analysisId);
        ArgumentException.ThrowIfNullOrWhiteSpace(subjectDigest);

        // Delegate the actual graph emission to the wrapped writer.
        var written = await _graphWriter.WriteAsync(graph, outputRoot, analysisId, cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug(
            "Wrote rich graph: {GraphPath}, hash={GraphHash}, nodes={NodeCount}, edges={EdgeCount}",
            written.GraphPath,
            written.GraphHash,
            written.NodeCount,
            written.EdgeCount);

        string? envelopePath = null;
        ReachabilityWitnessPublishResult? witness = null;

        if (!_options.Enabled)
        {
            _logger.LogDebug("Reachability witness attestation is disabled");
        }
        else
        {
            // Re-read the emitted graph bytes so the attestation covers exactly
            // what was written to disk.
            var graphBytes = await File.ReadAllBytesAsync(written.GraphPath, cancellationToken)
                .ConfigureAwait(false);

            witness = await _witnessPublisher.PublishAsync(
                graph,
                graphBytes,
                written.GraphHash,
                subjectDigest,
                policyHash,
                sourceCommit,
                cancellationToken).ConfigureAwait(false);

            // Persist the DSSE envelope next to the graph when one was produced.
            if (witness.DsseEnvelopeBytes.Length > 0)
            {
                var graphDir = Path.GetDirectoryName(written.GraphPath)!;
                envelopePath = Path.Combine(graphDir, "richgraph-v1.dsse.json");

                await File.WriteAllBytesAsync(envelopePath, witness.DsseEnvelopeBytes, cancellationToken)
                    .ConfigureAwait(false);

                _logger.LogInformation(
                    "Wrote reachability witness attestation: {AttestationPath}, statementHash={StatementHash}",
                    envelopePath,
                    witness.StatementHash);
            }
        }

        return new AttestingRichGraphWriteResult(
            GraphPath: written.GraphPath,
            MetaPath: written.MetaPath,
            GraphHash: written.GraphHash,
            NodeCount: written.NodeCount,
            EdgeCount: written.EdgeCount,
            AttestationPath: envelopePath,
            WitnessResult: witness);
    }
}
|
||||
@@ -0,0 +1,52 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ReachabilityAttestationServiceCollectionExtensions.cs
|
||||
// Sprint: SPRINT_3620_0001_0001_reachability_witness_dsse
|
||||
// Description: DI registration for reachability witness attestation services.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering reachability witness attestation services.
|
||||
/// </summary>
|
||||
public static class ReachabilityAttestationServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds reachability witness attestation services to the service collection.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddReachabilityWitnessAttestation(this IServiceCollection services)
|
||||
{
|
||||
// Register DSSE builder
|
||||
services.TryAddSingleton<ReachabilityWitnessDsseBuilder>();
|
||||
|
||||
// Register publisher
|
||||
services.TryAddSingleton<IReachabilityWitnessPublisher, ReachabilityWitnessPublisher>();
|
||||
|
||||
// Register attesting writer (wraps RichGraphWriter)
|
||||
services.TryAddSingleton<AttestingRichGraphWriter>();
|
||||
|
||||
// Register options
|
||||
services.AddOptions<ReachabilityWitnessOptions>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Configures reachability witness options.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="configure">Configuration action.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection ConfigureReachabilityWitnessOptions(
|
||||
this IServiceCollection services,
|
||||
Action<ReachabilityWitnessOptions> configure)
|
||||
{
|
||||
services.Configure(configure);
|
||||
return services;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,338 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CachingEpssProvider.cs
|
||||
// Sprint: SPRINT_3410_0002_0001_epss_scanner_integration
|
||||
// Task: EPSS-SCAN-005
|
||||
// Description: Valkey/Redis cache layer for EPSS lookups.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Messaging.Abstractions;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
/// <summary>
|
||||
/// Caching decorator for <see cref="IEpssProvider"/> that uses Valkey/Redis.
|
||||
/// Provides read-through caching for EPSS score lookups.
|
||||
/// </summary>
|
||||
public sealed class CachingEpssProvider : IEpssProvider
|
||||
{
|
||||
private const string CacheKeyPrefix = "epss:current:";
|
||||
private const string ModelDateCacheKey = "epss:model-date";
|
||||
|
||||
private readonly IEpssProvider _innerProvider;
|
||||
private readonly IDistributedCache<EpssCacheEntry>? _cache;
|
||||
private readonly EpssProviderOptions _options;
|
||||
private readonly ILogger<CachingEpssProvider> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public CachingEpssProvider(
|
||||
IEpssProvider innerProvider,
|
||||
IDistributedCache<EpssCacheEntry>? cache,
|
||||
IOptions<EpssProviderOptions> options,
|
||||
ILogger<CachingEpssProvider> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_innerProvider = innerProvider ?? throw new ArgumentNullException(nameof(innerProvider));
|
||||
_cache = cache; // Can be null if caching is disabled
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public async Task<EpssEvidence?> GetCurrentAsync(string cveId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cveId);
|
||||
|
||||
// If caching is disabled or cache is unavailable, go directly to inner provider
|
||||
if (!_options.EnableCache || _cache is null)
|
||||
{
|
||||
return await _innerProvider.GetCurrentAsync(cveId, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var cacheKey = BuildCacheKey(cveId);
|
||||
|
||||
try
|
||||
{
|
||||
var cacheResult = await _cache.GetAsync(cacheKey, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (cacheResult.IsHit && cacheResult.Value is not null)
|
||||
{
|
||||
_logger.LogDebug("Cache hit for EPSS score: {CveId}", cveId);
|
||||
return MapFromCacheEntry(cacheResult.Value, fromCache: true);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Cache failures should not block the request
|
||||
_logger.LogWarning(ex, "Cache lookup failed for {CveId}, falling back to database", cveId);
|
||||
}
|
||||
|
||||
// Cache miss - fetch from database
|
||||
var evidence = await _innerProvider.GetCurrentAsync(cveId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (evidence is not null)
|
||||
{
|
||||
await TryCacheAsync(cacheKey, evidence, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return evidence;
|
||||
}
|
||||
|
||||
public async Task<EpssBatchResult> GetCurrentBatchAsync(
|
||||
IEnumerable<string> cveIds,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(cveIds);
|
||||
|
||||
var cveIdList = cveIds.Distinct(StringComparer.OrdinalIgnoreCase).ToList();
|
||||
if (cveIdList.Count == 0)
|
||||
{
|
||||
return new EpssBatchResult
|
||||
{
|
||||
Found = Array.Empty<EpssEvidence>(),
|
||||
NotFound = Array.Empty<string>(),
|
||||
ModelDate = DateOnly.FromDateTime(_timeProvider.GetUtcNow().Date),
|
||||
LookupTimeMs = 0
|
||||
};
|
||||
}
|
||||
|
||||
// If caching is disabled, go directly to inner provider
|
||||
if (!_options.EnableCache || _cache is null)
|
||||
{
|
||||
return await _innerProvider.GetCurrentBatchAsync(cveIdList, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
var found = new List<EpssEvidence>();
|
||||
var notInCache = new List<string>();
|
||||
var cacheHits = 0;
|
||||
DateOnly? modelDate = null;
|
||||
|
||||
// Try cache first for each CVE
|
||||
foreach (var cveId in cveIdList)
|
||||
{
|
||||
try
|
||||
{
|
||||
var cacheKey = BuildCacheKey(cveId);
|
||||
var cacheResult = await _cache.GetAsync(cacheKey, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (cacheResult.IsHit && cacheResult.Value is not null)
|
||||
{
|
||||
var evidence = MapFromCacheEntry(cacheResult.Value, fromCache: true);
|
||||
found.Add(evidence);
|
||||
modelDate ??= evidence.ModelDate;
|
||||
cacheHits++;
|
||||
}
|
||||
else
|
||||
{
|
||||
notInCache.Add(cveId);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
// Cache failure - will need to fetch from DB
|
||||
_logger.LogDebug(ex, "Cache lookup failed for {CveId}", cveId);
|
||||
notInCache.Add(cveId);
|
||||
}
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"EPSS cache: {CacheHits}/{Total} hits, {CacheMisses} to fetch from database",
|
||||
cacheHits,
|
||||
cveIdList.Count,
|
||||
notInCache.Count);
|
||||
|
||||
// Fetch remaining from database
|
||||
if (notInCache.Count > 0)
|
||||
{
|
||||
var dbResult = await _innerProvider.GetCurrentBatchAsync(notInCache, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
foreach (var evidence in dbResult.Found)
|
||||
{
|
||||
found.Add(evidence);
|
||||
modelDate ??= evidence.ModelDate;
|
||||
|
||||
// Populate cache
|
||||
await TryCacheAsync(BuildCacheKey(evidence.CveId), evidence, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
// Add CVEs not found in database to the not found list
|
||||
var notFound = dbResult.NotFound.ToList();
|
||||
|
||||
sw.Stop();
|
||||
|
||||
return new EpssBatchResult
|
||||
{
|
||||
Found = found,
|
||||
NotFound = notFound,
|
||||
ModelDate = modelDate ?? DateOnly.FromDateTime(_timeProvider.GetUtcNow().Date),
|
||||
LookupTimeMs = sw.ElapsedMilliseconds,
|
||||
PartiallyFromCache = cacheHits > 0 && notInCache.Count > 0
|
||||
};
|
||||
}
|
||||
|
||||
sw.Stop();
|
||||
|
||||
return new EpssBatchResult
|
||||
{
|
||||
Found = found,
|
||||
NotFound = Array.Empty<string>(),
|
||||
ModelDate = modelDate ?? DateOnly.FromDateTime(_timeProvider.GetUtcNow().Date),
|
||||
LookupTimeMs = sw.ElapsedMilliseconds,
|
||||
PartiallyFromCache = cacheHits > 0
|
||||
};
|
||||
}
|
||||
|
||||
public Task<EpssEvidence?> GetAsOfDateAsync(
|
||||
string cveId,
|
||||
DateOnly asOfDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Historical lookups are not cached - they're typically one-off queries
|
||||
return _innerProvider.GetAsOfDateAsync(cveId, asOfDate, cancellationToken);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<EpssEvidence>> GetHistoryAsync(
|
||||
string cveId,
|
||||
DateOnly startDate,
|
||||
DateOnly endDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// History lookups are not cached
|
||||
return _innerProvider.GetHistoryAsync(cveId, startDate, endDate, cancellationToken);
|
||||
}
|
||||
|
||||
public async Task<DateOnly?> GetLatestModelDateAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Try cache first (short TTL for model date)
|
||||
if (_options.EnableCache && _cache is not null)
|
||||
{
|
||||
try
|
||||
{
|
||||
var cacheResult = await _cache.GetAsync(ModelDateCacheKey, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (cacheResult.IsHit && cacheResult.Value?.ModelDate is not null)
|
||||
{
|
||||
return cacheResult.Value.ModelDate;
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogDebug(ex, "Cache lookup failed for model date");
|
||||
}
|
||||
}
|
||||
|
||||
var modelDate = await _innerProvider.GetLatestModelDateAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Cache model date with shorter TTL (5 minutes)
|
||||
if (modelDate.HasValue && _options.EnableCache && _cache is not null)
|
||||
{
|
||||
try
|
||||
{
|
||||
await _cache.SetAsync(
|
||||
ModelDateCacheKey,
|
||||
new EpssCacheEntry { ModelDate = modelDate.Value },
|
||||
new CacheEntryOptions { TimeToLive = TimeSpan.FromMinutes(5) },
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogDebug(ex, "Failed to cache model date");
|
||||
}
|
||||
}
|
||||
|
||||
return modelDate;
|
||||
}
|
||||
|
||||
public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
return _innerProvider.IsAvailableAsync(cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Invalidates all cached EPSS scores. Called after new EPSS data is ingested.
|
||||
/// </summary>
|
||||
public async Task InvalidateCacheAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_cache is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var invalidated = await _cache.InvalidateByPatternAsync($"{CacheKeyPrefix}*", cancellationToken).ConfigureAwait(false);
|
||||
await _cache.InvalidateAsync(ModelDateCacheKey, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation("Invalidated {Count} EPSS cache entries", invalidated + 1);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to invalidate EPSS cache");
|
||||
}
|
||||
}
|
||||
|
||||
private static string BuildCacheKey(string cveId)
|
||||
{
|
||||
return $"{CacheKeyPrefix}{cveId.ToUpperInvariant()}";
|
||||
}
|
||||
|
||||
private async Task TryCacheAsync(string cacheKey, EpssEvidence evidence, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_cache is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var cacheEntry = new EpssCacheEntry
|
||||
{
|
||||
CveId = evidence.CveId,
|
||||
Score = evidence.Score,
|
||||
Percentile = evidence.Percentile,
|
||||
ModelDate = evidence.ModelDate,
|
||||
CachedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
await _cache.SetAsync(
|
||||
cacheKey,
|
||||
cacheEntry,
|
||||
new CacheEntryOptions { TimeToLive = _options.CacheTtl },
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogDebug(ex, "Failed to cache EPSS score for {CveId}", evidence.CveId);
|
||||
}
|
||||
}
|
||||
|
||||
private EpssEvidence MapFromCacheEntry(EpssCacheEntry entry, bool fromCache)
|
||||
{
|
||||
return new EpssEvidence
|
||||
{
|
||||
CveId = entry.CveId ?? string.Empty,
|
||||
Score = entry.Score,
|
||||
Percentile = entry.Percentile,
|
||||
ModelDate = entry.ModelDate,
|
||||
CapturedAt = entry.CachedAt,
|
||||
Source = "cache",
|
||||
FromCache = fromCache
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Cache entry for EPSS scores.
|
||||
/// </summary>
|
||||
public sealed class EpssCacheEntry
|
||||
{
|
||||
public string? CveId { get; set; }
|
||||
public double Score { get; set; }
|
||||
public double Percentile { get; set; }
|
||||
public DateOnly ModelDate { get; set; }
|
||||
public DateTimeOffset CachedAt { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssChangeRecord.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: #3 - Implement epss_changes flag logic
|
||||
// Description: Record representing an EPSS change that needs processing.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
/// <summary>
|
||||
/// Record representing an EPSS change that needs processing.
|
||||
/// </summary>
|
||||
public sealed record EpssChangeRecord
|
||||
{
|
||||
/// <summary>
|
||||
/// CVE identifier.
|
||||
/// </summary>
|
||||
public required string CveId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change flags indicating what changed.
|
||||
/// </summary>
|
||||
public EpssChangeFlags Flags { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Previous EPSS score (if available).
|
||||
/// </summary>
|
||||
public double? PreviousScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// New EPSS score.
|
||||
/// </summary>
|
||||
public double NewScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// New EPSS percentile.
|
||||
/// </summary>
|
||||
public double NewPercentile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Previous priority band (if available).
|
||||
/// </summary>
|
||||
public EpssPriorityBand PreviousBand { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Model date for this change.
|
||||
/// </summary>
|
||||
public DateOnly ModelDate { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssExplainHashCalculator.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S4 - Implement ComputeExplainHash
|
||||
// Description: Deterministic SHA-256 hash calculator for EPSS signal explainability.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
/// <summary>
|
||||
/// Calculator for deterministic explain hashes on EPSS signals.
|
||||
/// The explain hash provides a unique fingerprint for signal inputs,
|
||||
/// enabling audit trails and change detection.
|
||||
/// </summary>
|
||||
public static class EpssExplainHashCalculator
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Computes a deterministic SHA-256 hash from signal input parameters.
|
||||
/// </summary>
|
||||
/// <param name="modelDate">EPSS model date.</param>
|
||||
/// <param name="cveId">CVE identifier.</param>
|
||||
/// <param name="eventType">Event type (RISK_SPIKE, BAND_CHANGE, etc.).</param>
|
||||
/// <param name="oldBand">Previous risk band (nullable).</param>
|
||||
/// <param name="newBand">New risk band (nullable).</param>
|
||||
/// <param name="score">EPSS score.</param>
|
||||
/// <param name="percentile">EPSS percentile.</param>
|
||||
/// <param name="modelVersion">EPSS model version.</param>
|
||||
/// <returns>SHA-256 hash as byte array.</returns>
|
||||
public static byte[] ComputeExplainHash(
|
||||
DateOnly modelDate,
|
||||
string cveId,
|
||||
string eventType,
|
||||
string? oldBand,
|
||||
string? newBand,
|
||||
double score,
|
||||
double percentile,
|
||||
string? modelVersion)
|
||||
{
|
||||
// Create deterministic input structure
|
||||
var input = new ExplainHashInput
|
||||
{
|
||||
ModelDate = modelDate.ToString("yyyy-MM-dd"),
|
||||
CveId = cveId.ToUpperInvariant(), // Normalize CVE ID
|
||||
EventType = eventType.ToUpperInvariant(),
|
||||
OldBand = oldBand?.ToUpperInvariant() ?? "NONE",
|
||||
NewBand = newBand?.ToUpperInvariant() ?? "NONE",
|
||||
Score = Math.Round(score, 6), // Consistent precision
|
||||
Percentile = Math.Round(percentile, 6),
|
||||
ModelVersion = modelVersion ?? string.Empty
|
||||
};
|
||||
|
||||
// Serialize to deterministic JSON
|
||||
var json = JsonSerializer.Serialize(input, JsonOptions);
|
||||
var bytes = Encoding.UTF8.GetBytes(json);
|
||||
|
||||
return SHA256.HashData(bytes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes the dedupe key for an EPSS signal.
|
||||
/// This key is used to prevent duplicate signals.
|
||||
/// </summary>
|
||||
/// <param name="modelDate">EPSS model date.</param>
|
||||
/// <param name="cveId">CVE identifier.</param>
|
||||
/// <param name="eventType">Event type.</param>
|
||||
/// <param name="oldBand">Previous risk band.</param>
|
||||
/// <param name="newBand">New risk band.</param>
|
||||
/// <returns>Deterministic dedupe key string.</returns>
|
||||
public static string ComputeDedupeKey(
|
||||
DateOnly modelDate,
|
||||
string cveId,
|
||||
string eventType,
|
||||
string? oldBand,
|
||||
string? newBand)
|
||||
{
|
||||
return $"{modelDate:yyyy-MM-dd}:{cveId.ToUpperInvariant()}:{eventType.ToUpperInvariant()}:{oldBand?.ToUpperInvariant() ?? "NONE"}->{newBand?.ToUpperInvariant() ?? "NONE"}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts an explain hash to hex string for display.
|
||||
/// </summary>
|
||||
/// <param name="hash">The hash bytes.</param>
|
||||
/// <returns>Lowercase hex string.</returns>
|
||||
public static string ToHexString(byte[] hash)
|
||||
{
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private sealed record ExplainHashInput
|
||||
{
|
||||
public required string ModelDate { get; init; }
|
||||
public required string CveId { get; init; }
|
||||
public required string EventType { get; init; }
|
||||
public required string OldBand { get; init; }
|
||||
public required string NewBand { get; init; }
|
||||
public required double Score { get; init; }
|
||||
public required double Percentile { get; init; }
|
||||
public required string ModelVersion { get; init; }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,285 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssReplayService.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: R4 - Implement ReplayFromRawAsync
|
||||
// Description: Service for replaying EPSS data from stored raw payloads.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
/// <summary>
|
||||
/// Result of an EPSS replay operation.
|
||||
/// </summary>
|
||||
public sealed record EpssReplayResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The model date that was replayed.
|
||||
/// </summary>
|
||||
public required DateOnly ModelDate { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of rows replayed.
|
||||
/// </summary>
|
||||
public required int RowCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of distinct CVEs.
|
||||
/// </summary>
|
||||
public required int DistinctCveCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether this was a dry run (no writes).
|
||||
/// </summary>
|
||||
public required bool IsDryRun { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Duration of the replay in milliseconds.
|
||||
/// </summary>
|
||||
public required long DurationMs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Model version from the raw payload.
|
||||
/// </summary>
|
||||
public string? ModelVersion { get; init; }
|
||||
}
|
/// <summary>
/// Service for replaying EPSS data from stored raw payloads.
/// Enables deterministic re-normalization without re-downloading from FIRST.org.
/// </summary>
public sealed class EpssReplayService
{
    private readonly IEpssRawRepository _rawRepository;
    private readonly IEpssRepository _epssRepository;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EpssReplayService> _logger;

    /// <summary>
    /// Creates the replay service.
    /// </summary>
    /// <param name="rawRepository">Store of raw EPSS payloads.</param>
    /// <param name="epssRepository">Normalized EPSS snapshot store.</param>
    /// <param name="timeProvider">Clock abstraction (all timestamps come from here).</param>
    /// <param name="logger">Logger.</param>
    public EpssReplayService(
        IEpssRawRepository rawRepository,
        IEpssRepository epssRepository,
        TimeProvider timeProvider,
        ILogger<EpssReplayService> logger)
    {
        _rawRepository = rawRepository ?? throw new ArgumentNullException(nameof(rawRepository));
        _epssRepository = epssRepository ?? throw new ArgumentNullException(nameof(epssRepository));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Replays EPSS data from a stored raw payload for a specific date.
    /// Re-normalizes the data into the epss_snapshot table without re-downloading.
    /// On failure the import run is marked failed and the exception is rethrown.
    /// </summary>
    /// <param name="modelDate">The model date to replay.</param>
    /// <param name="dryRun">If true, validates and parses but doesn't write.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Result of the replay operation.</returns>
    /// <exception cref="InvalidOperationException">No raw payload exists for <paramref name="modelDate"/>.</exception>
    public async Task<EpssReplayResult> ReplayFromRawAsync(
        DateOnly modelDate,
        bool dryRun = false,
        CancellationToken cancellationToken = default)
    {
        var stopwatch = System.Diagnostics.Stopwatch.StartNew();

        _logger.LogInformation(
            "Starting EPSS replay from raw for {ModelDate} (dryRun={DryRun})",
            modelDate,
            dryRun);

        // Fetch the raw payload
        var raw = await _rawRepository.GetByDateAsync(modelDate, cancellationToken).ConfigureAwait(false);

        if (raw is null)
        {
            throw new InvalidOperationException($"No raw EPSS payload found for {modelDate}");
        }

        _logger.LogDebug(
            "Found raw payload: rawId={RawId}, rows={RowCount}, modelVersion={ModelVersion}",
            raw.RawId,
            raw.RowCount,
            raw.ModelVersion);

        // Parse the JSON payload
        var rows = ParseRawPayload(raw.Payload);

        if (dryRun)
        {
            stopwatch.Stop();

            // FIX: compute the distinct count once instead of enumerating
            // rows twice (once for the log, once for the result).
            var distinctCveCount = rows.Select(r => r.CveId).Distinct().Count();

            _logger.LogInformation(
                "EPSS replay dry run completed: modelDate={ModelDate}, rows={RowCount}, cves={CveCount}, duration={Duration}ms",
                modelDate,
                rows.Count,
                distinctCveCount,
                stopwatch.ElapsedMilliseconds);

            return new EpssReplayResult
            {
                ModelDate = modelDate,
                RowCount = rows.Count,
                DistinctCveCount = distinctCveCount,
                IsDryRun = true,
                DurationMs = stopwatch.ElapsedMilliseconds,
                ModelVersion = raw.ModelVersion
            };
        }

        // Create a new import run for the replay; source URI is prefixed so
        // replays are distinguishable from original ingests.
        var importRun = await _epssRepository.BeginImportAsync(
            modelDate,
            $"replay:{raw.SourceUri}",
            _timeProvider.GetUtcNow(),
            Convert.ToHexString(raw.PayloadSha256).ToLowerInvariant(),
            cancellationToken).ConfigureAwait(false);

        try
        {
            // Write the snapshot using async enumerable
            var writeResult = await _epssRepository.WriteSnapshotAsync(
                importRun.ImportRunId,
                modelDate,
                _timeProvider.GetUtcNow(),
                ToAsyncEnumerable(rows),
                cancellationToken).ConfigureAwait(false);

            // Mark success
            await _epssRepository.MarkImportSucceededAsync(
                importRun.ImportRunId,
                rows.Count,
                Convert.ToHexString(raw.PayloadSha256).ToLowerInvariant(),
                raw.ModelVersion,
                raw.PublishedDate,
                cancellationToken).ConfigureAwait(false);

            stopwatch.Stop();

            _logger.LogInformation(
                "EPSS replay completed: modelDate={ModelDate}, rows={RowCount}, cves={CveCount}, duration={Duration}ms",
                modelDate,
                writeResult.RowCount,
                writeResult.DistinctCveCount,
                stopwatch.ElapsedMilliseconds);

            return new EpssReplayResult
            {
                ModelDate = modelDate,
                RowCount = writeResult.RowCount,
                DistinctCveCount = writeResult.DistinctCveCount,
                IsDryRun = false,
                DurationMs = stopwatch.ElapsedMilliseconds,
                ModelVersion = raw.ModelVersion
            };
        }
        catch (Exception ex)
        {
            // Record the failure on the import run, then propagate.
            await _epssRepository.MarkImportFailedAsync(
                importRun.ImportRunId,
                $"Replay failed: {ex.Message}",
                cancellationToken).ConfigureAwait(false);

            throw;
        }
    }

    /// <summary>
    /// Replays EPSS data for a date range, one payload at a time in ascending
    /// date order. A failure for one date is logged and skipped so the rest of
    /// the range still replays; skipped dates are absent from the result list.
    /// </summary>
    /// <param name="startDate">Start date (inclusive).</param>
    /// <param name="endDate">End date (inclusive).</param>
    /// <param name="dryRun">If true, validates but doesn't write.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Results for each date successfully replayed.</returns>
    public async Task<IReadOnlyList<EpssReplayResult>> ReplayRangeAsync(
        DateOnly startDate,
        DateOnly endDate,
        bool dryRun = false,
        CancellationToken cancellationToken = default)
    {
        var results = new List<EpssReplayResult>();

        var rawPayloads = await _rawRepository.GetByDateRangeAsync(startDate, endDate, cancellationToken)
            .ConfigureAwait(false);

        _logger.LogInformation(
            "Replaying {Count} EPSS payloads from {StartDate} to {EndDate}",
            rawPayloads.Count,
            startDate,
            endDate);

        foreach (var raw in rawPayloads.OrderBy(r => r.AsOfDate))
        {
            try
            {
                var result = await ReplayFromRawAsync(raw.AsOfDate, dryRun, cancellationToken)
                    .ConfigureAwait(false);
                results.Add(result);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Failed to replay EPSS for {ModelDate}", raw.AsOfDate);
                // Continue with next date
            }
        }

        return results;
    }

    /// <summary>
    /// Gets available dates for replay, newest first.
    /// Defaults to the trailing year when no range is supplied.
    /// </summary>
    /// <param name="startDate">Optional start date filter.</param>
    /// <param name="endDate">Optional end date filter.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of available model dates, descending.</returns>
    public async Task<IReadOnlyList<DateOnly>> GetAvailableDatesAsync(
        DateOnly? startDate = null,
        DateOnly? endDate = null,
        CancellationToken cancellationToken = default)
    {
        // FIX: use the injected TimeProvider rather than DateTime.UtcNow, for
        // consistency with the rest of this class and so the clock stays
        // virtualizable in tests.
        var now = _timeProvider.GetUtcNow();
        var start = startDate ?? DateOnly.FromDateTime(now.AddYears(-1).Date);
        var end = endDate ?? DateOnly.FromDateTime(now.Date);

        var rawPayloads = await _rawRepository.GetByDateRangeAsync(start, end, cancellationToken)
            .ConfigureAwait(false);

        return rawPayloads.Select(r => r.AsOfDate).OrderByDescending(d => d).ToList();
    }

    // Parses the stored FIRST.org JSON array into score rows.
    // Entries with a missing/empty "cve" field are silently skipped.
    private static List<EpssScoreRow> ParseRawPayload(string jsonPayload)
    {
        var rows = new List<EpssScoreRow>();

        using var doc = JsonDocument.Parse(jsonPayload);

        foreach (var element in doc.RootElement.EnumerateArray())
        {
            var cveId = element.GetProperty("cve").GetString();
            var score = element.GetProperty("epss").GetDouble();
            var percentile = element.GetProperty("percentile").GetDouble();

            if (!string.IsNullOrEmpty(cveId))
            {
                rows.Add(new EpssScoreRow(cveId, score, percentile));
            }
        }

        return rows;
    }

    // Adapter: exposes an in-memory list through the IAsyncEnumerable-based
    // repository API. The trailing await suppresses CS1998 for the async iterator.
    private static async IAsyncEnumerable<EpssScoreRow> ToAsyncEnumerable(
        IEnumerable<EpssScoreRow> rows)
    {
        foreach (var row in rows)
        {
            yield return row;
        }

        await Task.CompletedTask;
    }
}
|
||||
@@ -0,0 +1,104 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEpssSignalPublisher.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S9 - Connect to Notify/Router
|
||||
// Description: Interface for publishing EPSS signals to the notification system.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
/// <summary>
|
||||
/// Result of publishing an EPSS signal.
|
||||
/// </summary>
|
||||
public sealed record EpssSignalPublishResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the publish was successful.
|
||||
/// </summary>
|
||||
public required bool Success { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Message ID from the queue (if applicable).
|
||||
/// </summary>
|
||||
public string? MessageId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if publish failed.
|
||||
/// </summary>
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Publisher for EPSS signals to the notification system.
|
||||
/// Routes signals to the appropriate topics based on event type.
|
||||
/// </summary>
|
||||
public interface IEpssSignalPublisher
|
||||
{
|
||||
/// <summary>
|
||||
/// Topic name for EPSS signals.
|
||||
/// </summary>
|
||||
const string TopicName = "signals.epss";
|
||||
|
||||
/// <summary>
|
||||
/// Publishes an EPSS signal to the notification system.
|
||||
/// </summary>
|
||||
/// <param name="signal">The signal to publish.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Result of the publish operation.</returns>
|
||||
Task<EpssSignalPublishResult> PublishAsync(
|
||||
EpssSignal signal,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Publishes multiple EPSS signals in a batch.
|
||||
/// </summary>
|
||||
/// <param name="signals">The signals to publish.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Number of successfully published signals.</returns>
|
||||
Task<int> PublishBatchAsync(
|
||||
IEnumerable<EpssSignal> signals,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Publishes a priority change event.
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Tenant identifier.</param>
|
||||
/// <param name="cveId">CVE identifier.</param>
|
||||
/// <param name="oldBand">Previous priority band.</param>
|
||||
/// <param name="newBand">New priority band.</param>
|
||||
/// <param name="epssScore">Current EPSS score.</param>
|
||||
/// <param name="modelDate">EPSS model date.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Result of the publish operation.</returns>
|
||||
Task<EpssSignalPublishResult> PublishPriorityChangedAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string oldBand,
|
||||
string newBand,
|
||||
double epssScore,
|
||||
DateOnly modelDate,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Null implementation of IEpssSignalPublisher for when messaging is disabled.
|
||||
/// </summary>
|
||||
public sealed class NullEpssSignalPublisher : IEpssSignalPublisher
|
||||
{
|
||||
public static readonly NullEpssSignalPublisher Instance = new();
|
||||
|
||||
private NullEpssSignalPublisher() { }
|
||||
|
||||
public Task<EpssSignalPublishResult> PublishAsync(EpssSignal signal, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(new EpssSignalPublishResult { Success = true, MessageId = "null" });
|
||||
|
||||
public Task<int> PublishBatchAsync(IEnumerable<EpssSignal> signals, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(signals.Count());
|
||||
|
||||
public Task<EpssSignalPublishResult> PublishPriorityChangedAsync(
|
||||
Guid tenantId, string cveId, string oldBand, string newBand, double epssScore, DateOnly modelDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(new EpssSignalPublishResult { Success = true, MessageId = "null" });
|
||||
}
|
||||
@@ -0,0 +1,165 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EpssServiceCollectionExtensions.cs
|
||||
// Sprint: SPRINT_3410_0002_0001_epss_scanner_integration
|
||||
// Task: EPSS-SCAN-005
|
||||
// Description: DI registration for EPSS services with optional Valkey cache layer.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Messaging.Abstractions;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Extensions;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for registering EPSS services with optional Valkey caching.
|
||||
/// </summary>
|
||||
public static class EpssServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds EPSS provider services to the service collection.
|
||||
/// Includes optional Valkey/Redis cache layer based on configuration.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="configuration">The configuration section for EPSS options.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddEpssProvider(
|
||||
this IServiceCollection services,
|
||||
IConfiguration configuration)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(configuration);
|
||||
|
||||
// Bind EPSS provider options
|
||||
services.AddOptions<EpssProviderOptions>()
|
||||
.Bind(configuration.GetSection(EpssProviderOptions.SectionName))
|
||||
.ValidateOnStart();
|
||||
|
||||
// Register the base PostgreSQL-backed provider
|
||||
services.TryAddScoped<EpssProvider>();
|
||||
|
||||
// Register the caching decorator
|
||||
services.TryAddScoped<IEpssProvider>(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<EpssProviderOptions>>().Value;
|
||||
var innerProvider = sp.GetRequiredService<EpssProvider>();
|
||||
var logger = sp.GetRequiredService<ILogger<CachingEpssProvider>>();
|
||||
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
|
||||
|
||||
// If caching is disabled, return the inner provider directly
|
||||
if (!options.EnableCache)
|
||||
{
|
||||
return innerProvider;
|
||||
}
|
||||
|
||||
// Try to get the cache factory (may be null if Valkey is not configured)
|
||||
var cacheFactory = sp.GetService<IDistributedCacheFactory>();
|
||||
IDistributedCache<EpssCacheEntry>? cache = null;
|
||||
|
||||
if (cacheFactory is not null)
|
||||
{
|
||||
try
|
||||
{
|
||||
cache = cacheFactory.Create<EpssCacheEntry>(new CacheOptions
|
||||
{
|
||||
KeyPrefix = "epss:",
|
||||
DefaultTtl = options.CacheTtl,
|
||||
SlidingExpiration = false
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogWarning(
|
||||
ex,
|
||||
"Failed to create EPSS cache, falling back to uncached provider. " +
|
||||
"Ensure Valkey/Redis is configured if caching is desired.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
logger.LogDebug(
|
||||
"No IDistributedCacheFactory registered. EPSS caching will be disabled. " +
|
||||
"Register StellaOps.Messaging.Transport.Valkey to enable caching.");
|
||||
}
|
||||
|
||||
return new CachingEpssProvider(
|
||||
innerProvider,
|
||||
cache,
|
||||
sp.GetRequiredService<IOptions<EpssProviderOptions>>(),
|
||||
logger,
|
||||
timeProvider);
|
||||
});
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Adds EPSS provider services with explicit options configuration.
|
||||
/// </summary>
|
||||
/// <param name="services">The service collection.</param>
|
||||
/// <param name="configure">The configuration action.</param>
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddEpssProvider(
|
||||
this IServiceCollection services,
|
||||
Action<EpssProviderOptions> configure)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(configure);
|
||||
|
||||
services.AddOptions<EpssProviderOptions>()
|
||||
.Configure(configure)
|
||||
.ValidateOnStart();
|
||||
|
||||
// Register the base PostgreSQL-backed provider
|
||||
services.TryAddScoped<EpssProvider>();
|
||||
|
||||
// Register the caching decorator
|
||||
services.TryAddScoped<IEpssProvider>(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<EpssProviderOptions>>().Value;
|
||||
var innerProvider = sp.GetRequiredService<EpssProvider>();
|
||||
var logger = sp.GetRequiredService<ILogger<CachingEpssProvider>>();
|
||||
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
|
||||
|
||||
// If caching is disabled, return the inner provider directly
|
||||
if (!options.EnableCache)
|
||||
{
|
||||
return innerProvider;
|
||||
}
|
||||
|
||||
// Try to get the cache factory
|
||||
var cacheFactory = sp.GetService<IDistributedCacheFactory>();
|
||||
IDistributedCache<EpssCacheEntry>? cache = null;
|
||||
|
||||
if (cacheFactory is not null)
|
||||
{
|
||||
try
|
||||
{
|
||||
cache = cacheFactory.Create<EpssCacheEntry>(new CacheOptions
|
||||
{
|
||||
KeyPrefix = "epss:",
|
||||
DefaultTtl = options.CacheTtl,
|
||||
SlidingExpiration = false
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogWarning(ex, "Failed to create EPSS cache");
|
||||
}
|
||||
}
|
||||
|
||||
return new CachingEpssProvider(
|
||||
innerProvider,
|
||||
cache,
|
||||
sp.GetRequiredService<IOptions<EpssProviderOptions>>(),
|
||||
logger,
|
||||
timeProvider);
|
||||
});
|
||||
|
||||
return services;
|
||||
}
|
||||
}
|
||||
@@ -90,6 +90,22 @@ public static class ServiceCollectionExtensions
|
||||
services.AddSingleton<EpssBundleSource>();
|
||||
// Note: EpssChangeDetector is a static class, no DI registration needed
|
||||
|
||||
// EPSS provider with optional Valkey cache layer (Sprint: SPRINT_3410_0002_0001, Task: EPSS-SCAN-005)
|
||||
services.AddEpssProvider(options =>
|
||||
{
|
||||
// Default configuration - can be overridden via config binding
|
||||
options.EnableCache = true;
|
||||
options.CacheTtl = TimeSpan.FromHours(1);
|
||||
options.MaxBatchSize = 1000;
|
||||
});
|
||||
|
||||
// EPSS raw and signal repositories (Sprint: SPRINT_3413_0001_0001)
|
||||
services.AddScoped<IEpssRawRepository, PostgresEpssRawRepository>();
|
||||
services.AddScoped<IEpssSignalRepository, PostgresEpssSignalRepository>();
|
||||
services.AddScoped<IObservedCveRepository, PostgresObservedCveRepository>();
|
||||
services.AddSingleton<EpssReplayService>();
|
||||
services.TryAddSingleton<IEpssSignalPublisher, NullEpssSignalPublisher>();
|
||||
|
||||
// Witness storage (Sprint: SPRINT_3700_0001_0001)
|
||||
services.AddScoped<IWitnessRepository, PostgresWitnessRepository>();
|
||||
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
-- SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
-- Sprint: 3413
|
||||
-- Task: Task #2 - vuln_instance_triage schema updates
|
||||
-- Description: Adds EPSS tracking columns to vulnerability instance triage table
|
||||
|
||||
-- ============================================================================
|
||||
-- EPSS Tracking Columns for Vulnerability Instances
|
||||
-- ============================================================================
|
||||
-- These columns store the current EPSS state for each vulnerability instance,
|
||||
-- enabling efficient priority band calculation and change detection.
|
||||
|
||||
-- Add EPSS columns to vuln_instance_triage if table exists
|
||||
DO $$
|
||||
BEGIN
|
||||
-- Check if table exists
|
||||
IF EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'vuln_instance_triage') THEN
|
||||
-- Add current_epss_score column
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_score') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_score DOUBLE PRECISION;
|
||||
COMMENT ON COLUMN vuln_instance_triage.current_epss_score IS 'Current EPSS probability score [0,1]';
|
||||
END IF;
|
||||
|
||||
-- Add current_epss_percentile column
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_percentile') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_percentile DOUBLE PRECISION;
|
||||
COMMENT ON COLUMN vuln_instance_triage.current_epss_percentile IS 'Current EPSS percentile rank [0,1]';
|
||||
END IF;
|
||||
|
||||
-- Add current_epss_band column
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'current_epss_band') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN current_epss_band TEXT;
|
||||
COMMENT ON COLUMN vuln_instance_triage.current_epss_band IS 'Current EPSS priority band: CRITICAL, HIGH, MEDIUM, LOW';
|
||||
END IF;
|
||||
|
||||
-- Add epss_model_date column
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'epss_model_date') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN epss_model_date DATE;
|
||||
COMMENT ON COLUMN vuln_instance_triage.epss_model_date IS 'EPSS model date when last updated';
|
||||
END IF;
|
||||
|
||||
-- Add epss_updated_at column
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'epss_updated_at') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN epss_updated_at TIMESTAMPTZ;
|
||||
COMMENT ON COLUMN vuln_instance_triage.epss_updated_at IS 'Timestamp when EPSS data was last updated';
|
||||
END IF;
|
||||
|
||||
-- Add previous_epss_band column (for change tracking)
|
||||
IF NOT EXISTS (SELECT 1 FROM information_schema.columns
|
||||
WHERE table_name = 'vuln_instance_triage' AND column_name = 'previous_epss_band') THEN
|
||||
ALTER TABLE vuln_instance_triage ADD COLUMN previous_epss_band TEXT;
|
||||
COMMENT ON COLUMN vuln_instance_triage.previous_epss_band IS 'Previous EPSS priority band before last update';
|
||||
END IF;
|
||||
|
||||
-- Create index for efficient band-based queries
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_vuln_instance_epss_band') THEN
|
||||
CREATE INDEX idx_vuln_instance_epss_band
|
||||
ON vuln_instance_triage (current_epss_band)
|
||||
WHERE current_epss_band IN ('CRITICAL', 'HIGH');
|
||||
END IF;
|
||||
|
||||
-- Create index for stale EPSS data detection
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_indexes WHERE indexname = 'idx_vuln_instance_epss_model_date') THEN
|
||||
CREATE INDEX idx_vuln_instance_epss_model_date
|
||||
ON vuln_instance_triage (epss_model_date);
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Added EPSS columns to vuln_instance_triage table';
|
||||
ELSE
|
||||
RAISE NOTICE 'Table vuln_instance_triage does not exist; skipping EPSS column additions';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- ============================================================================
|
||||
-- Batch Update Function for EPSS Enrichment
|
||||
-- ============================================================================
|
||||
-- Efficiently updates EPSS data for multiple vulnerability instances
|
||||
|
||||
CREATE OR REPLACE FUNCTION batch_update_epss_triage(
|
||||
p_updates JSONB,
|
||||
p_model_date DATE,
|
||||
p_updated_at TIMESTAMPTZ DEFAULT now()
|
||||
)
|
||||
RETURNS TABLE (
|
||||
updated_count INT,
|
||||
band_change_count INT
|
||||
) AS $$
|
||||
DECLARE
|
||||
v_updated INT := 0;
|
||||
v_band_changes INT := 0;
|
||||
v_row RECORD;
|
||||
BEGIN
|
||||
-- p_updates format: [{"instance_id": "...", "score": 0.123, "percentile": 0.456, "band": "HIGH"}, ...]
|
||||
FOR v_row IN SELECT * FROM jsonb_to_recordset(p_updates) AS x(
|
||||
instance_id UUID,
|
||||
score DOUBLE PRECISION,
|
||||
percentile DOUBLE PRECISION,
|
||||
band TEXT
|
||||
)
|
||||
LOOP
|
||||
UPDATE vuln_instance_triage SET
|
||||
previous_epss_band = current_epss_band,
|
||||
current_epss_score = v_row.score,
|
||||
current_epss_percentile = v_row.percentile,
|
||||
current_epss_band = v_row.band,
|
||||
epss_model_date = p_model_date,
|
||||
epss_updated_at = p_updated_at
|
||||
WHERE instance_id = v_row.instance_id
|
||||
AND (current_epss_band IS DISTINCT FROM v_row.band
|
||||
OR current_epss_score IS DISTINCT FROM v_row.score);
|
||||
|
||||
IF FOUND THEN
|
||||
v_updated := v_updated + 1;
|
||||
|
||||
-- Check if band actually changed
|
||||
IF (SELECT previous_epss_band FROM vuln_instance_triage WHERE instance_id = v_row.instance_id)
|
||||
IS DISTINCT FROM v_row.band THEN
|
||||
v_band_changes := v_band_changes + 1;
|
||||
END IF;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
RETURN QUERY SELECT v_updated, v_band_changes;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
COMMENT ON FUNCTION batch_update_epss_triage IS 'Batch updates EPSS data for vulnerability instances, tracking band changes';
|
||||
|
||||
-- ============================================================================
|
||||
-- View for Instances Needing EPSS Update
|
||||
-- ============================================================================
|
||||
-- Returns instances with stale or missing EPSS data
|
||||
|
||||
CREATE OR REPLACE VIEW v_epss_stale_instances AS
|
||||
SELECT
|
||||
vit.instance_id,
|
||||
vit.cve_id,
|
||||
vit.tenant_id,
|
||||
vit.current_epss_band,
|
||||
vit.epss_model_date,
|
||||
CURRENT_DATE - COALESCE(vit.epss_model_date, '1970-01-01'::DATE) AS days_stale
|
||||
FROM vuln_instance_triage vit
|
||||
WHERE vit.epss_model_date IS NULL
|
||||
OR vit.epss_model_date < CURRENT_DATE - 1;
|
||||
|
||||
COMMENT ON VIEW v_epss_stale_instances IS 'Instances with stale or missing EPSS data, needing enrichment';
|
||||
@@ -0,0 +1,177 @@
|
||||
-- =============================================================================
|
||||
-- Migration: 014_vuln_surfaces.sql
|
||||
-- Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
-- Task: SURF-014
|
||||
-- Description: Vulnerability surface storage for trigger method analysis.
|
||||
-- =============================================================================
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- Prevent re-running
|
||||
DO $$ BEGIN
|
||||
IF EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = 'scanner' AND tablename = 'vuln_surfaces') THEN
|
||||
RAISE EXCEPTION 'Migration 014_vuln_surfaces already applied';
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- =============================================================================
|
||||
-- VULN_SURFACES: Computed vulnerability surface for CVE + package + version
|
||||
-- =============================================================================
|
||||
CREATE TABLE scanner.vuln_surfaces (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
tenant_id UUID NOT NULL REFERENCES public.tenants(id),
|
||||
|
||||
-- CVE/vulnerability identity
|
||||
cve_id TEXT NOT NULL,
|
||||
package_ecosystem TEXT NOT NULL, -- 'nuget', 'npm', 'maven', 'pypi'
|
||||
package_name TEXT NOT NULL,
|
||||
vuln_version TEXT NOT NULL, -- Version with vulnerability
|
||||
fixed_version TEXT, -- First fixed version (null if no fix)
|
||||
|
||||
-- Surface computation metadata
|
||||
computed_at TIMESTAMPTZ NOT NULL DEFAULT now(),
|
||||
computation_duration_ms INTEGER,
|
||||
fingerprint_method TEXT NOT NULL, -- 'cecil-il', 'babel-ast', 'asm-bytecode', 'python-ast'
|
||||
|
||||
-- Summary statistics
|
||||
total_methods_vuln INTEGER NOT NULL DEFAULT 0,
|
||||
total_methods_fixed INTEGER NOT NULL DEFAULT 0,
|
||||
changed_method_count INTEGER NOT NULL DEFAULT 0,
|
||||
|
||||
-- DSSE attestation (optional)
|
||||
attestation_digest TEXT,
|
||||
|
||||
-- Indexes for lookups
|
||||
CONSTRAINT uq_vuln_surface_key UNIQUE (tenant_id, cve_id, package_ecosystem, package_name, vuln_version)
|
||||
);
|
||||
|
||||
-- Indexes for common queries
|
||||
CREATE INDEX idx_vuln_surfaces_cve ON scanner.vuln_surfaces(tenant_id, cve_id);
|
||||
CREATE INDEX idx_vuln_surfaces_package ON scanner.vuln_surfaces(tenant_id, package_ecosystem, package_name);
|
||||
CREATE INDEX idx_vuln_surfaces_computed_at ON scanner.vuln_surfaces(computed_at DESC);
|
||||
|
||||
COMMENT ON TABLE scanner.vuln_surfaces IS 'Computed vulnerability surfaces identifying which methods changed between vulnerable and fixed versions';
|
||||
|
||||
-- =============================================================================
|
||||
-- VULN_SURFACE_SINKS: Individual trigger methods for a vulnerability surface
|
||||
-- =============================================================================
|
||||
CREATE TABLE scanner.vuln_surface_sinks (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
surface_id UUID NOT NULL REFERENCES scanner.vuln_surfaces(id) ON DELETE CASCADE,
|
||||
|
||||
-- Method identity
|
||||
method_key TEXT NOT NULL, -- Normalized method signature (FQN)
|
||||
method_name TEXT NOT NULL, -- Simple method name
|
||||
declaring_type TEXT NOT NULL, -- Containing class/module
|
||||
namespace TEXT, -- Namespace/package
|
||||
|
||||
-- Change classification
|
||||
change_type TEXT NOT NULL CHECK (change_type IN ('added', 'removed', 'modified')),
|
||||
|
||||
-- Fingerprints for comparison
|
||||
vuln_fingerprint TEXT, -- Hash in vulnerable version (null if added in fix)
|
||||
fixed_fingerprint TEXT, -- Hash in fixed version (null if removed in fix)
|
||||
|
||||
-- Metadata
|
||||
is_public BOOLEAN NOT NULL DEFAULT true,
|
||||
parameter_count INTEGER,
|
||||
return_type TEXT,
|
||||
|
||||
-- Source location (if available from debug symbols)
|
||||
source_file TEXT,
|
||||
start_line INTEGER,
|
||||
end_line INTEGER,
|
||||
|
||||
-- Indexes for lookups
|
||||
CONSTRAINT uq_surface_sink_key UNIQUE (surface_id, method_key)
|
||||
);
|
||||
|
||||
-- Indexes for common queries
|
||||
CREATE INDEX idx_vuln_surface_sinks_surface ON scanner.vuln_surface_sinks(surface_id);
|
||||
CREATE INDEX idx_vuln_surface_sinks_method ON scanner.vuln_surface_sinks(method_name);
|
||||
CREATE INDEX idx_vuln_surface_sinks_type ON scanner.vuln_surface_sinks(declaring_type);
|
||||
|
||||
COMMENT ON TABLE scanner.vuln_surface_sinks IS 'Individual methods that changed between vulnerable and fixed package versions';
|
||||
|
||||
-- =============================================================================
|
||||
-- VULN_SURFACE_TRIGGERS: Links sinks to call graph nodes where they are invoked
|
||||
-- =============================================================================
|
||||
CREATE TABLE scanner.vuln_surface_triggers (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
sink_id UUID NOT NULL REFERENCES scanner.vuln_surface_sinks(id) ON DELETE CASCADE,
|
||||
scan_id UUID NOT NULL, -- References scanner.scans
|
||||
|
||||
-- Caller identity
|
||||
caller_node_id TEXT NOT NULL, -- Call graph node ID
|
||||
caller_method_key TEXT NOT NULL, -- FQN of calling method
|
||||
caller_file TEXT, -- Source file of caller
|
||||
caller_line INTEGER, -- Line number of call
|
||||
|
||||
-- Reachability analysis
|
||||
reachability_bucket TEXT NOT NULL DEFAULT 'unknown', -- 'entrypoint', 'direct', 'runtime', 'unknown', 'unreachable'
|
||||
path_length INTEGER, -- Shortest path from entrypoint
|
||||
confidence REAL NOT NULL DEFAULT 0.5,
|
||||
|
||||
-- Evidence
|
||||
call_type TEXT NOT NULL DEFAULT 'direct', -- 'direct', 'virtual', 'interface', 'reflection'
|
||||
is_conditional BOOLEAN NOT NULL DEFAULT false,
|
||||
|
||||
-- Indexes for lookups
|
||||
CONSTRAINT uq_trigger_key UNIQUE (sink_id, scan_id, caller_node_id)
|
||||
);
|
||||
|
||||
-- Indexes for common queries
|
||||
CREATE INDEX idx_vuln_surface_triggers_sink ON scanner.vuln_surface_triggers(sink_id);
|
||||
CREATE INDEX idx_vuln_surface_triggers_scan ON scanner.vuln_surface_triggers(scan_id);
|
||||
CREATE INDEX idx_vuln_surface_triggers_bucket ON scanner.vuln_surface_triggers(reachability_bucket);
|
||||
|
||||
COMMENT ON TABLE scanner.vuln_surface_triggers IS 'Links between vulnerability sink methods and their callers in analyzed code';
|
||||
|
||||
-- =============================================================================
|
||||
-- RLS (Row Level Security)
|
||||
-- =============================================================================
|
||||
ALTER TABLE scanner.vuln_surfaces ENABLE ROW LEVEL SECURITY;
|
||||
|
||||
-- Tenant isolation policy
|
||||
CREATE POLICY vuln_surfaces_tenant_isolation ON scanner.vuln_surfaces
|
||||
USING (tenant_id = current_setting('app.tenant_id', true)::uuid);
|
||||
|
||||
-- Note: vuln_surface_sinks and triggers inherit isolation through FK to surfaces
|
||||
|
||||
-- =============================================================================
|
||||
-- FUNCTIONS
|
||||
-- =============================================================================
|
||||
|
||||
-- Get surface statistics for a CVE
|
||||
CREATE OR REPLACE FUNCTION scanner.get_vuln_surface_stats(
|
||||
p_tenant_id UUID,
|
||||
p_cve_id TEXT
|
||||
)
|
||||
RETURNS TABLE (
|
||||
package_ecosystem TEXT,
|
||||
package_name TEXT,
|
||||
vuln_version TEXT,
|
||||
fixed_version TEXT,
|
||||
changed_method_count INTEGER,
|
||||
trigger_count BIGINT
|
||||
) AS $$
|
||||
BEGIN
|
||||
RETURN QUERY
|
||||
SELECT
|
||||
vs.package_ecosystem,
|
||||
vs.package_name,
|
||||
vs.vuln_version,
|
||||
vs.fixed_version,
|
||||
vs.changed_method_count,
|
||||
COUNT(DISTINCT vst.id)::BIGINT AS trigger_count
|
||||
FROM scanner.vuln_surfaces vs
|
||||
LEFT JOIN scanner.vuln_surface_sinks vss ON vss.surface_id = vs.id
|
||||
LEFT JOIN scanner.vuln_surface_triggers vst ON vst.sink_id = vss.id
|
||||
WHERE vs.tenant_id = p_tenant_id
|
||||
AND vs.cve_id = p_cve_id
|
||||
GROUP BY vs.id, vs.package_ecosystem, vs.package_name, vs.vuln_version, vs.fixed_version, vs.changed_method_count
|
||||
ORDER BY vs.package_ecosystem, vs.package_name;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql STABLE;
|
||||
|
||||
COMMIT;
|
||||
@@ -15,4 +15,6 @@ internal static class MigrationIds
|
||||
public const string EpssRawLayer = "011_epss_raw_layer.sql";
|
||||
public const string EpssSignalLayer = "012_epss_signal_layer.sql";
|
||||
public const string WitnessStorage = "013_witness_storage.sql";
|
||||
public const string EpssTriageColumns = "014_epss_triage_columns.sql";
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,228 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresEpssRawRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: R1-R4 - EPSS Raw Feed Layer
|
||||
// Description: PostgreSQL implementation of IEpssRawRepository.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Dapper;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Postgres;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IEpssRawRepository"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresEpssRawRepository : IEpssRawRepository
|
||||
{
|
||||
private readonly ScannerDataSource _dataSource;
|
||||
|
||||
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
|
||||
private string RawTable => $"{SchemaName}.epss_raw";
|
||||
|
||||
public PostgresEpssRawRepository(ScannerDataSource dataSource)
|
||||
{
|
||||
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
|
||||
}
|
||||
|
||||
public async Task<EpssRaw> CreateAsync(EpssRaw raw, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(raw);
|
||||
|
||||
var sql = $"""
|
||||
INSERT INTO {RawTable} (
|
||||
source_uri, asof_date, payload, payload_sha256,
|
||||
header_comment, model_version, published_date,
|
||||
row_count, compressed_size, decompressed_size, import_run_id
|
||||
)
|
||||
VALUES (
|
||||
@SourceUri, @AsOfDate, @Payload::jsonb, @PayloadSha256,
|
||||
@HeaderComment, @ModelVersion, @PublishedDate,
|
||||
@RowCount, @CompressedSize, @DecompressedSize, @ImportRunId
|
||||
)
|
||||
ON CONFLICT (source_uri, asof_date, payload_sha256) DO NOTHING
|
||||
RETURNING raw_id, ingestion_ts
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var result = await connection.QueryFirstOrDefaultAsync<(long raw_id, DateTimeOffset ingestion_ts)?>(sql, new
|
||||
{
|
||||
raw.SourceUri,
|
||||
AsOfDate = raw.AsOfDate.ToDateTime(TimeOnly.MinValue),
|
||||
raw.Payload,
|
||||
raw.PayloadSha256,
|
||||
raw.HeaderComment,
|
||||
raw.ModelVersion,
|
||||
PublishedDate = raw.PublishedDate?.ToDateTime(TimeOnly.MinValue),
|
||||
raw.RowCount,
|
||||
raw.CompressedSize,
|
||||
raw.DecompressedSize,
|
||||
raw.ImportRunId
|
||||
});
|
||||
|
||||
if (result.HasValue)
|
||||
{
|
||||
return raw with
|
||||
{
|
||||
RawId = result.Value.raw_id,
|
||||
IngestionTs = result.Value.ingestion_ts
|
||||
};
|
||||
}
|
||||
|
||||
// Record already exists (idempotency), fetch existing
|
||||
var existing = await GetByDateAsync(raw.AsOfDate, cancellationToken);
|
||||
return existing ?? raw;
|
||||
}
|
||||
|
||||
public async Task<EpssRaw?> GetByDateAsync(DateOnly asOfDate, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
raw_id, source_uri, asof_date, ingestion_ts,
|
||||
payload, payload_sha256, header_comment, model_version, published_date,
|
||||
row_count, compressed_size, decompressed_size, import_run_id
|
||||
FROM {RawTable}
|
||||
WHERE asof_date = @AsOfDate
|
||||
ORDER BY ingestion_ts DESC
|
||||
LIMIT 1
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var row = await connection.QueryFirstOrDefaultAsync<RawRow?>(sql, new
|
||||
{
|
||||
AsOfDate = asOfDate.ToDateTime(TimeOnly.MinValue)
|
||||
});
|
||||
|
||||
return row.HasValue ? MapToRaw(row.Value) : null;
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EpssRaw>> GetByDateRangeAsync(
|
||||
DateOnly startDate,
|
||||
DateOnly endDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
raw_id, source_uri, asof_date, ingestion_ts,
|
||||
payload, payload_sha256, header_comment, model_version, published_date,
|
||||
row_count, compressed_size, decompressed_size, import_run_id
|
||||
FROM {RawTable}
|
||||
WHERE asof_date >= @StartDate AND asof_date <= @EndDate
|
||||
ORDER BY asof_date DESC
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var rows = await connection.QueryAsync<RawRow>(sql, new
|
||||
{
|
||||
StartDate = startDate.ToDateTime(TimeOnly.MinValue),
|
||||
EndDate = endDate.ToDateTime(TimeOnly.MinValue)
|
||||
});
|
||||
|
||||
return rows.Select(MapToRaw).ToList();
|
||||
}
|
||||
|
||||
public async Task<EpssRaw?> GetLatestAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
raw_id, source_uri, asof_date, ingestion_ts,
|
||||
payload, payload_sha256, header_comment, model_version, published_date,
|
||||
row_count, compressed_size, decompressed_size, import_run_id
|
||||
FROM {RawTable}
|
||||
ORDER BY asof_date DESC, ingestion_ts DESC
|
||||
LIMIT 1
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var row = await connection.QueryFirstOrDefaultAsync<RawRow?>(sql);
|
||||
|
||||
return row.HasValue ? MapToRaw(row.Value) : null;
|
||||
}
|
||||
|
||||
public async Task<bool> ExistsAsync(DateOnly asOfDate, byte[] payloadSha256, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM {RawTable}
|
||||
WHERE asof_date = @AsOfDate AND payload_sha256 = @PayloadSha256
|
||||
)
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
return await connection.ExecuteScalarAsync<bool>(sql, new
|
||||
{
|
||||
AsOfDate = asOfDate.ToDateTime(TimeOnly.MinValue),
|
||||
PayloadSha256 = payloadSha256
|
||||
});
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EpssRaw>> GetByModelVersionAsync(
|
||||
string modelVersion,
|
||||
int limit = 100,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
raw_id, source_uri, asof_date, ingestion_ts,
|
||||
payload, payload_sha256, header_comment, model_version, published_date,
|
||||
row_count, compressed_size, decompressed_size, import_run_id
|
||||
FROM {RawTable}
|
||||
WHERE model_version = @ModelVersion
|
||||
ORDER BY asof_date DESC
|
||||
LIMIT @Limit
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var rows = await connection.QueryAsync<RawRow>(sql, new
|
||||
{
|
||||
ModelVersion = modelVersion,
|
||||
Limit = limit
|
||||
});
|
||||
|
||||
return rows.Select(MapToRaw).ToList();
|
||||
}
|
||||
|
||||
public async Task<int> PruneAsync(int retentionDays = 365, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"SELECT {SchemaName}.prune_epss_raw(@RetentionDays)";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
return await connection.ExecuteScalarAsync<int>(sql, new { RetentionDays = retentionDays });
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Maps a Dapper row (snake_case columns) to the <see cref="EpssRaw"/> domain record.
    /// </summary>
    private static EpssRaw MapToRaw(RawRow row)
    {
        return new EpssRaw
        {
            RawId = row.raw_id,
            SourceUri = row.source_uri,
            // asof_date is stored as a DATE; only the date component is meaningful.
            AsOfDate = DateOnly.FromDateTime(row.asof_date),
            IngestionTs = row.ingestion_ts,
            Payload = row.payload,
            PayloadSha256 = row.payload_sha256,
            HeaderComment = row.header_comment,
            ModelVersion = row.model_version,
            // published_date is nullable: the CSV header comment may lack a publish date.
            PublishedDate = row.published_date.HasValue ? DateOnly.FromDateTime(row.published_date.Value) : null,
            RowCount = row.row_count,
            CompressedSize = row.compressed_size,
            DecompressedSize = row.decompressed_size,
            ImportRunId = row.import_run_id
        };
    }
|
||||
|
||||
    // Dapper materialization target for the epss_raw table. Member names are
    // intentionally snake_case so they bind to column names without a custom
    // type map — do not rename.
    private readonly record struct RawRow(
        long raw_id,
        string source_uri,
        DateTime asof_date,
        DateTimeOffset ingestion_ts,
        string payload,
        byte[] payload_sha256,
        string? header_comment,
        string? model_version,
        DateTime? published_date,
        int row_count,
        long? compressed_size,
        long? decompressed_size,
        Guid? import_run_id);
|
||||
}
|
||||
@@ -9,6 +9,7 @@ using System.Data;
|
||||
using Dapper;
|
||||
using Npgsql;
|
||||
using NpgsqlTypes;
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
@@ -481,6 +482,61 @@ public sealed class PostgresEpssRepository : IEpssRepository
|
||||
cancellationToken: cancellationToken)).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<IReadOnlyList<EpssChangeRecord>> GetChangesAsync(
|
||||
DateOnly modelDate,
|
||||
Core.Epss.EpssChangeFlags? flags = null,
|
||||
int limit = 100000,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
cve_id,
|
||||
flags,
|
||||
prev_score,
|
||||
new_score,
|
||||
new_percentile,
|
||||
prev_band,
|
||||
model_date
|
||||
FROM {ChangesTable}
|
||||
WHERE model_date = @ModelDate
|
||||
{(flags.HasValue ? "AND (flags & @Flags) != 0" : "")}
|
||||
ORDER BY new_score DESC
|
||||
LIMIT @Limit
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var rows = await connection.QueryAsync<ChangeRow>(sql, new
|
||||
{
|
||||
ModelDate = modelDate,
|
||||
Flags = flags.HasValue ? (int)flags.Value : 0,
|
||||
Limit = limit
|
||||
});
|
||||
|
||||
return rows.Select(r => new EpssChangeRecord
|
||||
{
|
||||
CveId = r.cve_id,
|
||||
Flags = (Core.Epss.EpssChangeFlags)r.flags,
|
||||
PreviousScore = r.prev_score,
|
||||
NewScore = r.new_score,
|
||||
NewPercentile = r.new_percentile,
|
||||
PreviousBand = (Core.Epss.EpssPriorityBand)r.prev_band,
|
||||
ModelDate = r.model_date
|
||||
}).ToList();
|
||||
}
|
||||
|
||||
    // Dapper materialization target for the EPSS change-log table. Property
    // names are snake_case to bind to column names — do not rename.
    private sealed class ChangeRow
    {
        public string cve_id { get; set; } = "";
        public int flags { get; set; }
        // Null for first-seen CVEs that have no prior score.
        public double? prev_score { get; set; }
        public double new_score { get; set; }
        public double new_percentile { get; set; }
        public int prev_band { get; set; }
        public DateOnly model_date { get; set; }
    }
|
||||
|
||||
private sealed class StageCounts
|
||||
{
|
||||
public int distinct_count { get; set; }
|
||||
|
||||
@@ -0,0 +1,395 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresEpssSignalRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S3 - Implement PostgresEpssSignalRepository
|
||||
// Description: PostgreSQL implementation of IEpssSignalRepository.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using Dapper;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Postgres;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IEpssSignalRepository"/>.
|
||||
/// </summary>
|
||||
public sealed class PostgresEpssSignalRepository : IEpssSignalRepository
|
||||
{
|
||||
    private readonly ScannerDataSource _dataSource;

    // Table names are schema-qualified at runtime so the repository works
    // against any configured schema (falls back to the scanner default).
    private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
    private string SignalTable => $"{SchemaName}.epss_signal";
    private string ConfigTable => $"{SchemaName}.epss_signal_config";

    /// <summary>
    /// Creates the repository over the scanner's PostgreSQL data source.
    /// </summary>
    /// <param name="dataSource">Connection factory; must not be null.</param>
    public PostgresEpssSignalRepository(ScannerDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }
|
||||
|
||||
public async Task<EpssSignal> CreateAsync(EpssSignal signal, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(signal);
|
||||
|
||||
var sql = $"""
|
||||
INSERT INTO {SignalTable} (
|
||||
tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload
|
||||
)
|
||||
VALUES (
|
||||
@TenantId, @ModelDate, @CveId, @EventType, @RiskBand,
|
||||
@EpssScore, @EpssDelta, @Percentile, @PercentileDelta,
|
||||
@IsModelChange, @ModelVersion, @DedupeKey, @ExplainHash, @Payload::jsonb
|
||||
)
|
||||
ON CONFLICT (tenant_id, dedupe_key) DO NOTHING
|
||||
RETURNING signal_id, created_at
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var result = await connection.QueryFirstOrDefaultAsync<(long signal_id, DateTimeOffset created_at)?>(sql, new
|
||||
{
|
||||
signal.TenantId,
|
||||
ModelDate = signal.ModelDate.ToDateTime(TimeOnly.MinValue),
|
||||
signal.CveId,
|
||||
signal.EventType,
|
||||
signal.RiskBand,
|
||||
signal.EpssScore,
|
||||
signal.EpssDelta,
|
||||
signal.Percentile,
|
||||
signal.PercentileDelta,
|
||||
signal.IsModelChange,
|
||||
signal.ModelVersion,
|
||||
signal.DedupeKey,
|
||||
signal.ExplainHash,
|
||||
signal.Payload
|
||||
});
|
||||
|
||||
if (result.HasValue)
|
||||
{
|
||||
return signal with
|
||||
{
|
||||
SignalId = result.Value.signal_id,
|
||||
CreatedAt = result.Value.created_at
|
||||
};
|
||||
}
|
||||
|
||||
// Signal already exists (dedupe), fetch existing
|
||||
var existing = await GetByDedupeKeyAsync(signal.TenantId, signal.DedupeKey, cancellationToken);
|
||||
return existing ?? signal;
|
||||
}
|
||||
|
||||
public async Task<int> CreateBulkAsync(IEnumerable<EpssSignal> signals, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(signals);
|
||||
|
||||
var signalList = signals.ToList();
|
||||
if (signalList.Count == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var sql = $"""
|
||||
INSERT INTO {SignalTable} (
|
||||
tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload
|
||||
)
|
||||
VALUES (
|
||||
@TenantId, @ModelDate, @CveId, @EventType, @RiskBand,
|
||||
@EpssScore, @EpssDelta, @Percentile, @PercentileDelta,
|
||||
@IsModelChange, @ModelVersion, @DedupeKey, @ExplainHash, @Payload::jsonb
|
||||
)
|
||||
ON CONFLICT (tenant_id, dedupe_key) DO NOTHING
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var transaction = await connection.BeginTransactionAsync(cancellationToken);
|
||||
|
||||
var inserted = 0;
|
||||
foreach (var signal in signalList)
|
||||
{
|
||||
var affected = await connection.ExecuteAsync(sql, new
|
||||
{
|
||||
signal.TenantId,
|
||||
ModelDate = signal.ModelDate.ToDateTime(TimeOnly.MinValue),
|
||||
signal.CveId,
|
||||
signal.EventType,
|
||||
signal.RiskBand,
|
||||
signal.EpssScore,
|
||||
signal.EpssDelta,
|
||||
signal.Percentile,
|
||||
signal.PercentileDelta,
|
||||
signal.IsModelChange,
|
||||
signal.ModelVersion,
|
||||
signal.DedupeKey,
|
||||
signal.ExplainHash,
|
||||
signal.Payload
|
||||
}, transaction);
|
||||
|
||||
inserted += affected;
|
||||
}
|
||||
|
||||
await transaction.CommitAsync(cancellationToken);
|
||||
return inserted;
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EpssSignal>> GetByTenantAsync(
|
||||
Guid tenantId,
|
||||
DateOnly startDate,
|
||||
DateOnly endDate,
|
||||
IEnumerable<string>? eventTypes = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var eventTypeList = eventTypes?.ToList();
|
||||
var hasEventTypeFilter = eventTypeList?.Count > 0;
|
||||
|
||||
var sql = $"""
|
||||
SELECT
|
||||
signal_id, tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload, created_at
|
||||
FROM {SignalTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND model_date >= @StartDate
|
||||
AND model_date <= @EndDate
|
||||
{(hasEventTypeFilter ? "AND event_type = ANY(@EventTypes)" : "")}
|
||||
ORDER BY model_date DESC, created_at DESC
|
||||
LIMIT 10000
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var rows = await connection.QueryAsync<SignalRow>(sql, new
|
||||
{
|
||||
TenantId = tenantId,
|
||||
StartDate = startDate.ToDateTime(TimeOnly.MinValue),
|
||||
EndDate = endDate.ToDateTime(TimeOnly.MinValue),
|
||||
EventTypes = eventTypeList?.ToArray()
|
||||
});
|
||||
|
||||
return rows.Select(MapToSignal).ToList();
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EpssSignal>> GetByCveAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
int limit = 100,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
signal_id, tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload, created_at
|
||||
FROM {SignalTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND cve_id = @CveId
|
||||
ORDER BY model_date DESC, created_at DESC
|
||||
LIMIT @Limit
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var rows = await connection.QueryAsync<SignalRow>(sql, new
|
||||
{
|
||||
TenantId = tenantId,
|
||||
CveId = cveId,
|
||||
Limit = limit
|
||||
});
|
||||
|
||||
return rows.Select(MapToSignal).ToList();
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<EpssSignal>> GetHighPriorityAsync(
|
||||
Guid tenantId,
|
||||
DateOnly startDate,
|
||||
DateOnly endDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
signal_id, tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload, created_at
|
||||
FROM {SignalTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND model_date >= @StartDate
|
||||
AND model_date <= @EndDate
|
||||
AND risk_band IN ('CRITICAL', 'HIGH')
|
||||
ORDER BY model_date DESC, created_at DESC
|
||||
LIMIT 10000
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var rows = await connection.QueryAsync<SignalRow>(sql, new
|
||||
{
|
||||
TenantId = tenantId,
|
||||
StartDate = startDate.ToDateTime(TimeOnly.MinValue),
|
||||
EndDate = endDate.ToDateTime(TimeOnly.MinValue)
|
||||
});
|
||||
|
||||
return rows.Select(MapToSignal).ToList();
|
||||
}
|
||||
|
||||
public async Task<EpssSignalConfig?> GetConfigAsync(Guid tenantId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
config_id, tenant_id,
|
||||
critical_percentile, high_percentile, medium_percentile,
|
||||
big_jump_delta, suppress_on_model_change, enabled_event_types,
|
||||
created_at, updated_at
|
||||
FROM {ConfigTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var row = await connection.QueryFirstOrDefaultAsync<ConfigRow?>(sql, new { TenantId = tenantId });
|
||||
|
||||
return row.HasValue ? MapToConfig(row.Value) : null;
|
||||
}
|
||||
|
||||
public async Task<EpssSignalConfig> UpsertConfigAsync(EpssSignalConfig config, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(config);
|
||||
|
||||
var sql = $"""
|
||||
INSERT INTO {ConfigTable} (
|
||||
tenant_id, critical_percentile, high_percentile, medium_percentile,
|
||||
big_jump_delta, suppress_on_model_change, enabled_event_types
|
||||
)
|
||||
VALUES (
|
||||
@TenantId, @CriticalPercentile, @HighPercentile, @MediumPercentile,
|
||||
@BigJumpDelta, @SuppressOnModelChange, @EnabledEventTypes
|
||||
)
|
||||
ON CONFLICT (tenant_id) DO UPDATE SET
|
||||
critical_percentile = EXCLUDED.critical_percentile,
|
||||
high_percentile = EXCLUDED.high_percentile,
|
||||
medium_percentile = EXCLUDED.medium_percentile,
|
||||
big_jump_delta = EXCLUDED.big_jump_delta,
|
||||
suppress_on_model_change = EXCLUDED.suppress_on_model_change,
|
||||
enabled_event_types = EXCLUDED.enabled_event_types,
|
||||
updated_at = now()
|
||||
RETURNING config_id, created_at, updated_at
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
|
||||
var result = await connection.QueryFirstAsync<(Guid config_id, DateTimeOffset created_at, DateTimeOffset updated_at)>(sql, new
|
||||
{
|
||||
config.TenantId,
|
||||
config.CriticalPercentile,
|
||||
config.HighPercentile,
|
||||
config.MediumPercentile,
|
||||
config.BigJumpDelta,
|
||||
config.SuppressOnModelChange,
|
||||
EnabledEventTypes = config.EnabledEventTypes.ToArray()
|
||||
});
|
||||
|
||||
return config with
|
||||
{
|
||||
ConfigId = result.config_id,
|
||||
CreatedAt = result.created_at,
|
||||
UpdatedAt = result.updated_at
|
||||
};
|
||||
}
|
||||
|
||||
public async Task<int> PruneAsync(int retentionDays = 90, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"SELECT {SchemaName}.prune_epss_signals(@RetentionDays)";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
return await connection.ExecuteScalarAsync<int>(sql, new { RetentionDays = retentionDays });
|
||||
}
|
||||
|
||||
private async Task<EpssSignal?> GetByDedupeKeyAsync(Guid tenantId, string dedupeKey, CancellationToken cancellationToken)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT
|
||||
signal_id, tenant_id, model_date, cve_id, event_type, risk_band,
|
||||
epss_score, epss_delta, percentile, percentile_delta,
|
||||
is_model_change, model_version, dedupe_key, explain_hash, payload, created_at
|
||||
FROM {SignalTable}
|
||||
WHERE tenant_id = @TenantId AND dedupe_key = @DedupeKey
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var row = await connection.QueryFirstOrDefaultAsync<SignalRow?>(sql, new { TenantId = tenantId, DedupeKey = dedupeKey });
|
||||
|
||||
return row.HasValue ? MapToSignal(row.Value) : null;
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Maps a Dapper row (snake_case columns) to the <see cref="EpssSignal"/> domain record.
    /// </summary>
    private static EpssSignal MapToSignal(SignalRow row)
    {
        return new EpssSignal
        {
            SignalId = row.signal_id,
            TenantId = row.tenant_id,
            // model_date is stored as a DATE; only the date component is meaningful.
            ModelDate = DateOnly.FromDateTime(row.model_date),
            CveId = row.cve_id,
            EventType = row.event_type,
            RiskBand = row.risk_band,
            EpssScore = row.epss_score,
            EpssDelta = row.epss_delta,
            Percentile = row.percentile,
            PercentileDelta = row.percentile_delta,
            IsModelChange = row.is_model_change,
            ModelVersion = row.model_version,
            DedupeKey = row.dedupe_key,
            ExplainHash = row.explain_hash,
            // Payload is the raw jsonb column text; deserialization is the caller's concern.
            Payload = row.payload,
            CreatedAt = row.created_at
        };
    }
|
||||
|
||||
    /// <summary>
    /// Maps a Dapper row (snake_case columns) to the <see cref="EpssSignalConfig"/> domain record.
    /// </summary>
    private static EpssSignalConfig MapToConfig(ConfigRow row)
    {
        return new EpssSignalConfig
        {
            ConfigId = row.config_id,
            TenantId = row.tenant_id,
            CriticalPercentile = row.critical_percentile,
            HighPercentile = row.high_percentile,
            MediumPercentile = row.medium_percentile,
            BigJumpDelta = row.big_jump_delta,
            SuppressOnModelChange = row.suppress_on_model_change,
            // A NULL array column means "no event types enabled" — normalize to empty.
            EnabledEventTypes = row.enabled_event_types ?? Array.Empty<string>(),
            CreatedAt = row.created_at,
            UpdatedAt = row.updated_at
        };
    }
|
||||
|
||||
    // Dapper materialization target for the epss_signal table. Member names are
    // snake_case so they bind to column names without a custom type map — do not rename.
    private readonly record struct SignalRow(
        long signal_id,
        Guid tenant_id,
        DateTime model_date,
        string cve_id,
        string event_type,
        string? risk_band,
        double? epss_score,
        double? epss_delta,
        double? percentile,
        double? percentile_delta,
        bool is_model_change,
        string? model_version,
        string dedupe_key,
        byte[] explain_hash,
        string payload,
        DateTimeOffset created_at);
|
||||
|
||||
    // Dapper materialization target for the epss_signal_config table. Member
    // names are snake_case so they bind to column names — do not rename.
    private readonly record struct ConfigRow(
        Guid config_id,
        Guid tenant_id,
        double critical_percentile,
        double high_percentile,
        double medium_percentile,
        double big_jump_delta,
        bool suppress_on_model_change,
        string[]? enabled_event_types,
        DateTimeOffset created_at,
        DateTimeOffset updated_at);
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresObservedCveRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S6 - Add observed CVEs filter
|
||||
// Description: PostgreSQL implementation of IObservedCveRepository.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Dapper;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Postgres;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of <see cref="IObservedCveRepository"/>.
|
||||
/// Queries vuln_instance_triage to determine which CVEs are observed per tenant.
|
||||
/// </summary>
|
||||
public sealed class PostgresObservedCveRepository : IObservedCveRepository
|
||||
{
|
||||
    private readonly ScannerDataSource _dataSource;

    // Table name is schema-qualified at runtime so the repository works
    // against any configured schema (falls back to the scanner default).
    private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
    private string TriageTable => $"{SchemaName}.vuln_instance_triage";

    /// <summary>
    /// Creates the repository over the scanner's PostgreSQL data source.
    /// </summary>
    /// <param name="dataSource">Connection factory; must not be null.</param>
    public PostgresObservedCveRepository(ScannerDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }
|
||||
|
||||
public async Task<IReadOnlySet<string>> GetObservedCvesAsync(
|
||||
Guid tenantId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT DISTINCT cve_id
|
||||
FROM {TriageTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND cve_id IS NOT NULL
|
||||
AND cve_id LIKE 'CVE-%'
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var cves = await connection.QueryAsync<string>(sql, new { TenantId = tenantId });
|
||||
|
||||
return new HashSet<string>(cves, StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
public async Task<bool> IsObservedAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT EXISTS (
|
||||
SELECT 1 FROM {TriageTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND cve_id = @CveId
|
||||
)
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
return await connection.ExecuteScalarAsync<bool>(sql, new { TenantId = tenantId, CveId = cveId });
|
||||
}
|
||||
|
||||
public async Task<IReadOnlySet<string>> FilterObservedAsync(
|
||||
Guid tenantId,
|
||||
IEnumerable<string> cveIds,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var cveList = cveIds.ToList();
|
||||
if (cveList.Count == 0)
|
||||
{
|
||||
return new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
var sql = $"""
|
||||
SELECT DISTINCT cve_id
|
||||
FROM {TriageTable}
|
||||
WHERE tenant_id = @TenantId
|
||||
AND cve_id = ANY(@CveIds)
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var observed = await connection.QueryAsync<string>(sql, new
|
||||
{
|
||||
TenantId = tenantId,
|
||||
CveIds = cveList.ToArray()
|
||||
});
|
||||
|
||||
return new HashSet<string>(observed, StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<Guid>> GetActiveTenantsAsync(
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var sql = $"""
|
||||
SELECT DISTINCT tenant_id
|
||||
FROM {TriageTable}
|
||||
WHERE cve_id IS NOT NULL
|
||||
AND cve_id LIKE 'CVE-%'
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var tenants = await connection.QueryAsync<Guid>(sql);
|
||||
|
||||
return tenants.ToList();
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyDictionary<string, IReadOnlyList<Guid>>> GetTenantsObservingCvesAsync(
|
||||
IEnumerable<string> cveIds,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var cveList = cveIds.ToList();
|
||||
if (cveList.Count == 0)
|
||||
{
|
||||
return new Dictionary<string, IReadOnlyList<Guid>>(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
var sql = $"""
|
||||
SELECT cve_id, tenant_id
|
||||
FROM {TriageTable}
|
||||
WHERE cve_id = ANY(@CveIds)
|
||||
GROUP BY cve_id, tenant_id
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
var rows = await connection.QueryAsync<(string cve_id, Guid tenant_id)>(sql, new
|
||||
{
|
||||
CveIds = cveList.ToArray()
|
||||
});
|
||||
|
||||
var result = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
foreach (var row in rows)
|
||||
{
|
||||
if (!result.TryGetValue(row.cve_id, out var tenants))
|
||||
{
|
||||
tenants = new List<Guid>();
|
||||
result[row.cve_id] = tenants;
|
||||
}
|
||||
|
||||
if (!tenants.Contains(row.tenant_id))
|
||||
{
|
||||
tenants.Add(row.tenant_id);
|
||||
}
|
||||
}
|
||||
|
||||
return result.ToDictionary(
|
||||
kvp => kvp.Key,
|
||||
kvp => (IReadOnlyList<Guid>)kvp.Value,
|
||||
StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEpssRawRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: R1-R4 - EPSS Raw Feed Layer
|
||||
// Description: Repository interface for immutable EPSS raw payload storage.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// Repository for immutable EPSS raw payload storage.
|
||||
/// Layer 1 of the 3-layer EPSS architecture: stores full CSV payload as JSONB.
|
||||
/// </summary>
|
||||
public interface IEpssRawRepository
{
    /// <summary>
    /// Stores a raw EPSS payload. The stored row is immutable once written.
    /// </summary>
    /// <param name="raw">The raw payload to store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The stored record with generated ID.</returns>
    Task<EpssRaw> CreateAsync(EpssRaw raw, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a raw payload by as-of date.
    /// </summary>
    /// <param name="asOfDate">The date of the EPSS snapshot.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The raw payload, or null if not found.</returns>
    Task<EpssRaw?> GetByDateAsync(DateOnly asOfDate, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets raw payloads within a date range (both bounds inclusive).
    /// </summary>
    /// <param name="startDate">Start date (inclusive).</param>
    /// <param name="endDate">End date (inclusive).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of raw payloads ordered by date descending.</returns>
    Task<IReadOnlyList<EpssRaw>> GetByDateRangeAsync(
        DateOnly startDate,
        DateOnly endDate,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the most recent raw payload.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The most recent raw payload, or null if none exist.</returns>
    Task<EpssRaw?> GetLatestAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if a raw payload exists for a given date and content hash.
    /// Used for idempotency checks before re-ingesting the same snapshot.
    /// </summary>
    /// <param name="asOfDate">The date of the EPSS snapshot.</param>
    /// <param name="payloadSha256">SHA-256 hash of decompressed content.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the payload already exists.</returns>
    Task<bool> ExistsAsync(DateOnly asOfDate, byte[] payloadSha256, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets payloads by model version, newest first.
    /// Useful for detecting model version changes.
    /// </summary>
    /// <param name="modelVersion">The model version string.</param>
    /// <param name="limit">Maximum number of records to return.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of raw payloads with the specified model version.</returns>
    Task<IReadOnlyList<EpssRaw>> GetByModelVersionAsync(
        string modelVersion,
        int limit = 100,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Prunes old raw payloads based on retention policy.
    /// </summary>
    /// <param name="retentionDays">Number of days to retain. Default: 365.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of records deleted.</returns>
    Task<int> PruneAsync(int retentionDays = 365, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
|
||||
/// EPSS raw payload entity.
|
||||
/// </summary>
|
||||
public sealed record EpssRaw
{
    /// <summary>
    /// Raw record ID (auto-generated by the database on insert; 0 before persistence).
    /// </summary>
    public long RawId { get; init; }

    /// <summary>
    /// Source URI where the data was retrieved from.
    /// </summary>
    public required string SourceUri { get; init; }

    /// <summary>
    /// Date of the EPSS snapshot.
    /// </summary>
    public required DateOnly AsOfDate { get; init; }

    /// <summary>
    /// Timestamp when the data was ingested.
    /// </summary>
    public DateTimeOffset IngestionTs { get; init; }

    /// <summary>
    /// Full payload as a JSON array string:
    /// [{cve:"CVE-...", epss:0.123, percentile:0.456}, ...].
    /// Stored as jsonb in the database.
    /// </summary>
    public required string Payload { get; init; }

    /// <summary>
    /// SHA-256 hash of decompressed content for integrity and idempotency checks.
    /// </summary>
    public required byte[] PayloadSha256 { get; init; }

    /// <summary>
    /// Raw comment line from the CSV header
    /// (e.g., "# model: v2025.03.14, published: 2025-03-14"); null when absent.
    /// </summary>
    public string? HeaderComment { get; init; }

    /// <summary>
    /// Extracted model version from the header comment; null when it could not be parsed.
    /// </summary>
    public string? ModelVersion { get; init; }

    /// <summary>
    /// Extracted publish date from the header comment; null when it could not be parsed.
    /// </summary>
    public DateOnly? PublishedDate { get; init; }

    /// <summary>
    /// Number of data rows in the payload.
    /// </summary>
    public required int RowCount { get; init; }

    /// <summary>
    /// Original compressed file size (bytes); null when unknown.
    /// </summary>
    public long? CompressedSize { get; init; }

    /// <summary>
    /// Decompressed CSV size (bytes); null when unknown.
    /// </summary>
    public long? DecompressedSize { get; init; }

    /// <summary>
    /// Reference to the import run that created this record; null for ad-hoc ingestions.
    /// </summary>
    public Guid? ImportRunId { get; init; }
}
|
||||
@@ -5,6 +5,7 @@
|
||||
// Description: EPSS persistence contract (import runs, scores/current projection, change log).
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.Core.Epss;
|
||||
using StellaOps.Scanner.Storage.Epss;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
@@ -54,6 +55,21 @@ public interface IEpssRepository
|
||||
string cveId,
|
||||
int days,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets EPSS change records for a model date, optionally filtered by flags.
|
||||
/// Used by enrichment job to target only CVEs with material changes.
|
||||
/// </summary>
|
||||
/// <param name="modelDate">The EPSS model date.</param>
|
||||
/// <param name="flags">Change flags to filter by. Null returns all changes.</param>
|
||||
/// <param name="limit">Maximum number of records to return.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>List of change records matching the criteria.</returns>
|
||||
Task<IReadOnlyList<EpssChangeRecord>> GetChangesAsync(
|
||||
DateOnly modelDate,
|
||||
EpssChangeFlags? flags = null,
|
||||
int limit = 100000,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
public sealed record EpssImportRun(
|
||||
|
||||
@@ -0,0 +1,242 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEpssSignalRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S2 - Implement IEpssSignalRepository interface
|
||||
// Description: Repository interface for EPSS signal-ready events.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
/// Repository for EPSS signal-ready events (tenant-scoped).
/// </summary>
/// <remarks>
/// All read operations take an explicit tenant identifier; only
/// <see cref="PruneAsync"/> operates across tenants (it carries no tenant parameter).
/// </remarks>
public interface IEpssSignalRepository
{
    /// <summary>
    /// Creates a new EPSS signal.
    /// </summary>
    /// <param name="signal">The signal to create.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The created signal with generated ID.</returns>
    Task<EpssSignal> CreateAsync(EpssSignal signal, CancellationToken cancellationToken = default);

    /// <summary>
    /// Creates multiple EPSS signals in bulk.
    /// Uses upsert with dedupe_key to prevent duplicates.
    /// </summary>
    /// <param name="signals">The signals to create.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of signals created (excluding duplicates).</returns>
    Task<int> CreateBulkAsync(IEnumerable<EpssSignal> signals, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets signals for a tenant within a date range.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="startDate">Start date (inclusive).</param>
    /// <param name="endDate">End date (inclusive).</param>
    /// <param name="eventTypes">Optional filter by event types. Null returns all event types.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of signals ordered by model_date descending.</returns>
    Task<IReadOnlyList<EpssSignal>> GetByTenantAsync(
        Guid tenantId,
        DateOnly startDate,
        DateOnly endDate,
        IEnumerable<string>? eventTypes = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets signals for a specific CVE within a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="limit">Maximum number of signals to return. Default: 100.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of signals ordered by model_date descending.</returns>
    Task<IReadOnlyList<EpssSignal>> GetByCveAsync(
        Guid tenantId,
        string cveId,
        int limit = 100,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets high-priority signals (CRITICAL/HIGH band) for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="startDate">Start date (inclusive).</param>
    /// <param name="endDate">End date (inclusive).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of high-priority signals.</returns>
    Task<IReadOnlyList<EpssSignal>> GetHighPriorityAsync(
        Guid tenantId,
        DateOnly startDate,
        DateOnly endDate,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the signal configuration for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The configuration, or null if not configured.</returns>
    Task<EpssSignalConfig?> GetConfigAsync(Guid tenantId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Upserts the signal configuration for a tenant.
    /// </summary>
    /// <param name="config">The configuration to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The upserted configuration.</returns>
    Task<EpssSignalConfig> UpsertConfigAsync(EpssSignalConfig config, CancellationToken cancellationToken = default);

    /// <summary>
    /// Prunes old signals based on retention policy.
    /// </summary>
    /// <param name="retentionDays">Number of days to retain. Default: 90.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of signals deleted.</returns>
    Task<int> PruneAsync(int retentionDays = 90, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// EPSS signal entity.
/// </summary>
/// <remarks>
/// NOTE(review): <see cref="ExplainHash"/> is a <c>byte[]</c>; record value
/// equality compares arrays by reference, so two signals with identical hash
/// bytes are not <c>Equals</c> — confirm no caller relies on record equality.
/// </remarks>
public sealed record EpssSignal
{
    /// <summary>
    /// Signal ID (auto-generated).
    /// </summary>
    public long SignalId { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required Guid TenantId { get; init; }

    /// <summary>
    /// EPSS model date.
    /// </summary>
    public required DateOnly ModelDate { get; init; }

    /// <summary>
    /// CVE identifier.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Event type: RISK_SPIKE, BAND_CHANGE, NEW_HIGH, DROPPED_LOW, MODEL_UPDATED.
    /// </summary>
    public required string EventType { get; init; }

    /// <summary>
    /// Risk band: CRITICAL, HIGH, MEDIUM, LOW. Null when no band applies.
    /// </summary>
    public string? RiskBand { get; init; }

    /// <summary>
    /// EPSS score at signal time.
    /// </summary>
    public double? EpssScore { get; init; }

    /// <summary>
    /// EPSS score delta from previous day.
    /// </summary>
    public double? EpssDelta { get; init; }

    /// <summary>
    /// EPSS percentile at signal time.
    /// </summary>
    public double? Percentile { get; init; }

    /// <summary>
    /// Percentile delta from previous day.
    /// </summary>
    public double? PercentileDelta { get; init; }

    /// <summary>
    /// Whether this is a model version change day.
    /// </summary>
    public bool IsModelChange { get; init; }

    /// <summary>
    /// EPSS model version.
    /// </summary>
    public string? ModelVersion { get; init; }

    /// <summary>
    /// Deterministic deduplication key.
    /// </summary>
    public required string DedupeKey { get; init; }

    /// <summary>
    /// SHA-256 of signal inputs for audit trail.
    /// </summary>
    public required byte[] ExplainHash { get; init; }

    /// <summary>
    /// Full evidence payload as JSON.
    /// </summary>
    public required string Payload { get; init; }

    /// <summary>
    /// Creation timestamp. Presumably assigned by the storage layer on insert — confirm.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// EPSS signal configuration for a tenant.
/// </summary>
/// <remarks>
/// Percentile thresholds are expressed as fractions in [0, 1]
/// (e.g. 0.995 = 99.5th percentile), judging by the defaults — confirm.
/// </remarks>
public sealed record EpssSignalConfig
{
    /// <summary>
    /// Configuration ID.
    /// </summary>
    public Guid ConfigId { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    public required Guid TenantId { get; init; }

    /// <summary>
    /// Critical percentile threshold. Default: 0.995.
    /// </summary>
    public double CriticalPercentile { get; init; } = 0.995;

    /// <summary>
    /// High percentile threshold. Default: 0.99.
    /// </summary>
    public double HighPercentile { get; init; } = 0.99;

    /// <summary>
    /// Medium percentile threshold. Default: 0.90.
    /// </summary>
    public double MediumPercentile { get; init; } = 0.90;

    /// <summary>
    /// Big jump delta threshold. Default: 0.10.
    /// </summary>
    public double BigJumpDelta { get; init; } = 0.10;

    /// <summary>
    /// Suppress signals on model version change. Default: true.
    /// </summary>
    public bool SuppressOnModelChange { get; init; } = true;

    /// <summary>
    /// Enabled event types. Defaults to RISK_SPIKE, BAND_CHANGE and NEW_HIGH
    /// (DROPPED_LOW and MODEL_UPDATED are opt-in).
    /// </summary>
    public IReadOnlyList<string> EnabledEventTypes { get; init; } =
        new[] { "RISK_SPIKE", "BAND_CHANGE", "NEW_HIGH" };

    /// <summary>
    /// Creation timestamp.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Last update timestamp.
    /// </summary>
    public DateTimeOffset UpdatedAt { get; init; }
}
|
||||
@@ -0,0 +1,101 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IObservedCveRepository.cs
|
||||
// Sprint: SPRINT_3413_0001_0001_epss_live_enrichment
|
||||
// Task: S6 - Add observed CVEs filter
|
||||
// Description: Repository interface for tracking observed CVEs per tenant.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
/// <summary>
/// Repository for tracking which CVEs are observed (in use) by each tenant.
/// Used to filter EPSS signals to only relevant CVEs.
/// </summary>
public interface IObservedCveRepository
{
    /// <summary>
    /// Gets the set of CVE IDs that are currently observed by a tenant.
    /// Only CVEs that exist in the tenant's vulnerability inventory.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Set of observed CVE IDs.</returns>
    Task<IReadOnlySet<string>> GetObservedCvesAsync(
        Guid tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if a CVE is observed by a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cveId">CVE identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the CVE is observed.</returns>
    Task<bool> IsObservedAsync(
        Guid tenantId,
        string cveId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Filters a set of CVE IDs to only those observed by a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cveIds">CVE IDs to filter.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Filtered set of observed CVE IDs.</returns>
    Task<IReadOnlySet<string>> FilterObservedAsync(
        Guid tenantId,
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets all tenant IDs that have at least one observed CVE.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of tenant IDs.</returns>
    Task<IReadOnlyList<Guid>> GetActiveTenantsAsync(
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets tenant IDs that observe specific CVEs.
    /// Used for targeted signal delivery.
    /// </summary>
    /// <param name="cveIds">CVE IDs to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Dictionary mapping CVE ID to list of tenant IDs observing it.</returns>
    Task<IReadOnlyDictionary<string, IReadOnlyList<Guid>>> GetTenantsObservingCvesAsync(
        IEnumerable<string> cveIds,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Null implementation of IObservedCveRepository for when tenant filtering is disabled.
/// Returns all CVEs as observed.
/// </summary>
/// <remarks>
/// NOTE(review): <see cref="GetObservedCvesAsync"/> returns an EMPTY set while
/// <see cref="IsObservedAsync"/> returns true and <see cref="FilterObservedAsync"/>
/// passes every CVE through — callers that enumerate the observed set behave
/// differently from callers that probe or filter it. Confirm this asymmetry is
/// intentional.
/// </remarks>
public sealed class NullObservedCveRepository : IObservedCveRepository
{
    public static readonly NullObservedCveRepository Instance = new();

    private NullObservedCveRepository() { }

    public Task<IReadOnlySet<string>> GetObservedCvesAsync(Guid tenantId, CancellationToken cancellationToken = default)
    {
        IReadOnlySet<string> empty = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        return Task.FromResult(empty);
    }

    public Task<bool> IsObservedAsync(Guid tenantId, string cveId, CancellationToken cancellationToken = default)
    {
        // When filtering is disabled, every CVE counts as observed.
        return Task.FromResult(true);
    }

    public Task<IReadOnlySet<string>> FilterObservedAsync(Guid tenantId, IEnumerable<string> cveIds, CancellationToken cancellationToken = default)
    {
        // Pass-through: dedupe case-insensitively but filter nothing out.
        IReadOnlySet<string> passthrough = new HashSet<string>(cveIds, StringComparer.OrdinalIgnoreCase);
        return Task.FromResult(passthrough);
    }

    public Task<IReadOnlyList<Guid>> GetActiveTenantsAsync(CancellationToken cancellationToken = default)
    {
        // Guid.Empty stands in for the single implicit tenant.
        IReadOnlyList<Guid> tenants = new[] { Guid.Empty };
        return Task.FromResult(tenants);
    }

    public Task<IReadOnlyDictionary<string, IReadOnlyList<Guid>>> GetTenantsObservingCvesAsync(IEnumerable<string> cveIds, CancellationToken cancellationToken = default)
    {
        // Map every CVE to the implicit tenant. Dictionary.Add preserves the
        // original duplicate-key throw semantics of ToDictionary.
        var mapping = new Dictionary<string, IReadOnlyList<Guid>>(StringComparer.OrdinalIgnoreCase);
        foreach (var cveId in cveIds)
        {
            mapping.Add(cveId, new[] { Guid.Empty });
        }
        return Task.FromResult<IReadOnlyDictionary<string, IReadOnlyList<Guid>>>(mapping);
    }
}
|
||||
@@ -27,5 +27,6 @@
|
||||
<ProjectReference Include="..\\StellaOps.Scanner.ReachabilityDrift\\StellaOps.Scanner.ReachabilityDrift.csproj" />
|
||||
<ProjectReference Include="..\\StellaOps.Scanner.SmartDiff\\StellaOps.Scanner.SmartDiff.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Infrastructure.Postgres\\StellaOps.Infrastructure.Postgres.csproj" />
|
||||
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Messaging\\StellaOps.Messaging.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -0,0 +1,197 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CecilMethodFingerprinterTests.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Description: Unit tests for CecilMethodFingerprinter.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Scanner.VulnSurfaces.Fingerprint;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for CecilMethodFingerprinter, exercised against the test
/// assembly itself so no external fixture is needed.
/// </summary>
public class CecilMethodFingerprinterTests
{
    private readonly CecilMethodFingerprinter _fingerprinter;

    public CecilMethodFingerprinterTests()
    {
        _fingerprinter = new CecilMethodFingerprinter(
            NullLogger<CecilMethodFingerprinter>.Instance);
    }

    [Fact]
    public void Ecosystem_ReturnsNuget()
    {
        Assert.Equal("nuget", _fingerprinter.Ecosystem);
    }

    [Fact]
    public async Task FingerprintAsync_WithNullRequest_ThrowsArgumentNullException()
    {
        await Assert.ThrowsAsync<ArgumentNullException>(
            () => _fingerprinter.FingerprintAsync(null!));
    }

    [Fact]
    public async Task FingerprintAsync_WithNonExistentPath_ReturnsEmptyResult()
    {
        // Arrange
        var request = new FingerprintRequest
        {
            PackagePath = "/nonexistent/path/to/package",
            PackageName = "nonexistent",
            Version = "1.0.0"
        };

        // Act
        var result = await _fingerprinter.FingerprintAsync(request);

        // Assert - a missing path is treated as "no assemblies found", not an error.
        Assert.NotNull(result);
        Assert.True(result.Success);
        Assert.Empty(result.Methods);
    }

    [Fact]
    public async Task FingerprintAsync_WithOwnAssembly_FindsMethods()
    {
        // Arrange - use the test assembly itself as a known-good input.
        var request = CreateSelfFingerprintRequest();

        // Act
        var result = await _fingerprinter.FingerprintAsync(request);

        // Assert
        Assert.NotNull(result);
        Assert.True(result.Success);
        Assert.NotEmpty(result.Methods);
    }

    [Fact]
    public async Task FingerprintAsync_ComputesDeterministicHashes()
    {
        // Arrange - fingerprint the same input twice.
        var request = CreateSelfFingerprintRequest();

        // Act
        var result1 = await _fingerprinter.FingerprintAsync(request);
        var result2 = await _fingerprinter.FingerprintAsync(request);

        // Assert - the same methods must produce the same body hashes.
        Assert.Equal(result1.Methods.Count, result2.Methods.Count);

        foreach (var (key, fp1) in result1.Methods)
        {
            Assert.True(result2.Methods.TryGetValue(key, out var fp2));
            Assert.Equal(fp1.BodyHash, fp2.BodyHash);
        }
    }

    [Fact]
    public async Task FingerprintAsync_WithCancellation_RespectsCancellation()
    {
        // Arrange - the token is already cancelled before the call starts.
        using var cts = new CancellationTokenSource();
        cts.Cancel();

        var testAssemblyPath = typeof(CecilMethodFingerprinterTests).Assembly.Location;
        var assemblyDir = Path.GetDirectoryName(testAssemblyPath)!;

        var request = new FingerprintRequest
        {
            PackagePath = assemblyDir,
            PackageName = "test",
            Version = "1.0.0"
        };

        // Act - the implementation may either surface OperationCanceledException
        // or observe the token and return early; both respect cancellation.
        var exception = await Record.ExceptionAsync(
            () => _fingerprinter.FingerprintAsync(request, cts.Token));

        // Assert - any other exception type is a genuine failure.
        Assert.True(
            exception is null or OperationCanceledException,
            $"Expected no exception or OperationCanceledException, got: {exception?.GetType().Name}");
    }

    [Fact]
    public async Task FingerprintAsync_MethodKeyFormat_IsValid()
    {
        // Arrange
        var request = CreateSelfFingerprintRequest();

        // Act
        var result = await _fingerprinter.FingerprintAsync(request);

        // Assert - keys should be non-empty and use "::" (type::method) or "."
        // separators. Compiler-generated names (e.g. "<>f__AnonymousType0`2")
        // are acceptable as long as the overall shape holds.
        foreach (var key in result.Methods.Keys)
        {
            Assert.NotEmpty(key);
            Assert.True(key.Contains("::") || key.Contains("."),
                $"Method key should contain :: or . separator: {key}");
        }
    }

    [Fact]
    public async Task FingerprintAsync_IncludesSignature()
    {
        // Arrange
        var request = CreateSelfFingerprintRequest();

        // Act
        var result = await _fingerprinter.FingerprintAsync(request);

        // Assert - at least some fingerprints should carry a signature.
        var anyWithSignature = result.Methods.Values.Any(fp => !string.IsNullOrEmpty(fp.Signature));
        Assert.True(anyWithSignature, "At least some methods should have signatures");
    }

    /// <summary>
    /// Builds a request that fingerprints the directory containing this test
    /// assembly - a guaranteed-present set of managed DLLs.
    /// </summary>
    private static FingerprintRequest CreateSelfFingerprintRequest()
    {
        var testAssemblyPath = typeof(CecilMethodFingerprinterTests).Assembly.Location;
        var assemblyDir = Path.GetDirectoryName(testAssemblyPath)!;

        return new FingerprintRequest
        {
            PackagePath = assemblyDir,
            PackageName = "test",
            Version = "1.0.0",
            IncludePrivateMethods = false
        };
    }
}
|
||||
@@ -0,0 +1,348 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MethodDiffEngineTests.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Description: Unit tests for MethodDiffEngine.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Scanner.VulnSurfaces.Fingerprint;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for MethodDiffEngine, covering the added / removed / modified
/// classification of method fingerprints between a vulnerable and a fixed build.
/// </summary>
public class MethodDiffEngineTests
{
    private readonly MethodDiffEngine _diffEngine;

    public MethodDiffEngineTests()
    {
        _diffEngine = new MethodDiffEngine(NullLogger<MethodDiffEngine>.Instance);
    }

    [Fact]
    public async Task DiffAsync_WithNullRequest_ThrowsArgumentNullException()
    {
        await Assert.ThrowsAsync<ArgumentNullException>(() => _diffEngine.DiffAsync(null!));
    }

    [Fact]
    public async Task DiffAsync_WithIdenticalFingerprints_ReturnsNoChanges()
    {
        // Arrange - two independent results carrying the same fingerprint.
        var fingerprint = CreateFingerprint("Test.Class::Method", "sha256:abc123");
        var request = BuildRequest(ToResult(fingerprint), ToResult(fingerprint));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Empty(diff.Modified);
        Assert.Empty(diff.Added);
        Assert.Empty(diff.Removed);
        Assert.Equal(0, diff.TotalChanges);
    }

    [Fact]
    public async Task DiffAsync_WithModifiedMethod_ReturnsModified()
    {
        // Arrange - same key, different body hash.
        var vulnFp = CreateFingerprint("Test.Class::Method", "sha256:old_hash");
        var fixedFp = CreateFingerprint("Test.Class::Method", "sha256:new_hash");
        var request = BuildRequest(ToResult(vulnFp), ToResult(fixedFp));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Single(diff.Modified);
        Assert.Equal("Test.Class::Method", diff.Modified[0].MethodKey);
        Assert.Equal("sha256:old_hash", diff.Modified[0].VulnVersion.BodyHash);
        Assert.Equal("sha256:new_hash", diff.Modified[0].FixedVersion.BodyHash);
        Assert.Empty(diff.Added);
        Assert.Empty(diff.Removed);
    }

    [Fact]
    public async Task DiffAsync_WithAddedMethod_ReturnsAdded()
    {
        // Arrange - fixed side has one extra method.
        var vulnFp = CreateFingerprint("Test.Class::ExistingMethod", "sha256:existing");
        var newFp = CreateFingerprint("Test.Class::NewMethod", "sha256:new_method");
        var request = BuildRequest(ToResult(vulnFp), ToResult(vulnFp, newFp));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Empty(diff.Modified);
        Assert.Single(diff.Added);
        Assert.Equal("Test.Class::NewMethod", diff.Added[0].MethodKey);
        Assert.Empty(diff.Removed);
    }

    [Fact]
    public async Task DiffAsync_WithRemovedMethod_ReturnsRemoved()
    {
        // Arrange - vuln side has one method the fixed side lacks.
        var existingFp = CreateFingerprint("Test.Class::ExistingMethod", "sha256:existing");
        var removedFp = CreateFingerprint("Test.Class::RemovedMethod", "sha256:removed");
        var request = BuildRequest(ToResult(existingFp, removedFp), ToResult(existingFp));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Empty(diff.Modified);
        Assert.Empty(diff.Added);
        Assert.Single(diff.Removed);
        Assert.Equal("Test.Class::RemovedMethod", diff.Removed[0].MethodKey);
    }

    [Fact]
    public async Task DiffAsync_WithMultipleChanges_ReturnsAllChanges()
    {
        // Arrange - simulate a fix that modifies one method, adds one, removes one.
        var unchangedFp = CreateFingerprint("Test::Unchanged", "h1");
        var modifiedVuln = CreateFingerprint("Test::Modified", "old");
        var modifiedFixed = CreateFingerprint("Test::Modified", "new");
        var removedFp = CreateFingerprint("Test::Removed", "h2");
        var addedFp = CreateFingerprint("Test::Added", "h3");

        var request = BuildRequest(
            ToResult(unchangedFp, modifiedVuln, removedFp),
            ToResult(unchangedFp, modifiedFixed, addedFp));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Single(diff.Modified);
        Assert.Single(diff.Added);
        Assert.Single(diff.Removed);
        Assert.Equal(3, diff.TotalChanges);
    }

    [Fact]
    public async Task DiffAsync_TriggerMethods_AreModifiedOrRemoved()
    {
        // This test validates the key insight:
        // trigger methods (the vulnerable entry points) are typically MODIFIED
        // or REMOVED in a fix - they wouldn't be ADDED in the fixed version.

        // Arrange
        var triggerMethodVuln = CreateFingerprint(
            "Newtonsoft.Json.JsonConvert::DeserializeObject",
            "sha256:vulnerable_impl");
        var triggerMethodFixed = CreateFingerprint(
            "Newtonsoft.Json.JsonConvert::DeserializeObject",
            "sha256:patched_impl");

        var request = BuildRequest(ToResult(triggerMethodVuln), ToResult(triggerMethodFixed));

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert - the trigger method should show as modified.
        Assert.True(diff.Success);
        Assert.Single(diff.Modified);
        Assert.Equal("Newtonsoft.Json.JsonConvert::DeserializeObject", diff.Modified[0].MethodKey);
        Assert.Empty(diff.Added);
        Assert.Empty(diff.Removed);
    }

    [Fact]
    public async Task DiffAsync_WithEmptyFingerprints_ReturnsNoChanges()
    {
        // Arrange - nothing on either side.
        var request = BuildRequest(ToResult(), ToResult());

        // Act
        var diff = await _diffEngine.DiffAsync(request);

        // Assert
        Assert.True(diff.Success);
        Assert.Equal(0, diff.TotalChanges);
    }

    /// <summary>Wraps vuln/fixed fingerprint results in a diff request.</summary>
    private static MethodDiffRequest BuildRequest(FingerprintResult vuln, FingerprintResult patched)
    {
        return new MethodDiffRequest
        {
            VulnFingerprints = vuln,
            FixedFingerprints = patched
        };
    }

    /// <summary>Builds a successful FingerprintResult keyed by MethodKey.</summary>
    private static FingerprintResult ToResult(params MethodFingerprint[] fingerprints)
    {
        var methods = new Dictionary<string, MethodFingerprint>();
        foreach (var fp in fingerprints)
        {
            methods.Add(fp.MethodKey, fp);
        }

        return new FingerprintResult
        {
            Success = true,
            Methods = methods
        };
    }

    /// <summary>Builds a minimal fingerprint from a "Type::Method" key and a body hash.</summary>
    private static MethodFingerprint CreateFingerprint(string methodKey, string bodyHash)
    {
        var parts = methodKey.Split("::");
        var declaringType = parts.Length > 1 ? parts[0] : "Unknown";
        var name = parts.Length > 1 ? parts[1] : parts[0];

        return new MethodFingerprint
        {
            MethodKey = methodKey,
            DeclaringType = declaringType,
            Name = name,
            BodyHash = bodyHash,
            Signature = $"void {name}()",
            IsPublic = true,
            BodySize = 100
        };
    }
}
|
||||
@@ -0,0 +1,362 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// NuGetPackageDownloaderTests.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Task: SURF-020
|
||||
// Description: Unit tests for NuGetPackageDownloader.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Net;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using Moq.Protected;
|
||||
using StellaOps.Scanner.VulnSurfaces.Download;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Tests;
|
||||
|
||||
public class NuGetPackageDownloaderTests : IDisposable
|
||||
{
|
||||
private readonly string _testOutputDir;
|
||||
|
||||
public NuGetPackageDownloaderTests()
|
||||
{
|
||||
_testOutputDir = Path.Combine(Path.GetTempPath(), $"nuget-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_testOutputDir);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_testOutputDir))
|
||||
{
|
||||
try { Directory.Delete(_testOutputDir, recursive: true); }
|
||||
catch { /* ignore cleanup failures */ }
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Ecosystem_ReturnsNuget()
|
||||
{
|
||||
// Arrange
|
||||
var downloader = CreateDownloader();
|
||||
|
||||
// Assert
|
||||
Assert.Equal("nuget", downloader.Ecosystem);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithNullRequest_ThrowsArgumentNullException()
|
||||
{
|
||||
// Arrange
|
||||
var downloader = CreateDownloader();
|
||||
|
||||
// Act & Assert
|
||||
await Assert.ThrowsAsync<ArgumentNullException>(
|
||||
() => downloader.DownloadAsync(null!));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithHttpError_ReturnsFailResult()
|
||||
{
|
||||
// Arrange
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ReturnsAsync(new HttpResponseMessage
|
||||
{
|
||||
StatusCode = HttpStatusCode.NotFound,
|
||||
ReasonPhrase = "Not Found"
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "NonExistent.Package",
|
||||
Version = "1.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Success);
|
||||
Assert.Contains("404", result.Error ?? "");
|
||||
Assert.Null(result.ExtractedPath);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithValidNupkg_ReturnsSuccessResult()
|
||||
{
|
||||
// Arrange - create a mock .nupkg (which is just a zip file)
|
||||
var nupkgContent = CreateMinimalNupkg();
|
||||
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ReturnsAsync(new HttpResponseMessage
|
||||
{
|
||||
StatusCode = HttpStatusCode.OK,
|
||||
Content = new ByteArrayContent(nupkgContent)
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "TestPackage",
|
||||
Version = "1.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.Success);
|
||||
Assert.NotNull(result.ExtractedPath);
|
||||
Assert.NotNull(result.ArchivePath);
|
||||
Assert.True(Directory.Exists(result.ExtractedPath));
|
||||
Assert.True(File.Exists(result.ArchivePath));
|
||||
Assert.False(result.FromCache);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithCachedPackage_ReturnsCachedResult()
|
||||
{
|
||||
// Arrange - pre-create the cached directory
|
||||
var packageDir = Path.Combine(_testOutputDir, "testpackage.1.0.0");
|
||||
Directory.CreateDirectory(packageDir);
|
||||
File.WriteAllText(Path.Combine(packageDir, "marker.txt"), "cached");
|
||||
|
||||
var downloader = CreateDownloader();
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "TestPackage",
|
||||
Version = "1.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.Success);
|
||||
Assert.True(result.FromCache);
|
||||
Assert.Equal(packageDir, result.ExtractedPath);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithCacheFalse_BypassesCache()
|
||||
{
|
||||
// Arrange - pre-create the cached directory
|
||||
var packageDir = Path.Combine(_testOutputDir, "testpackage.2.0.0");
|
||||
Directory.CreateDirectory(packageDir);
|
||||
|
||||
// Set up mock to return content (we're bypassing cache)
|
||||
var nupkgContent = CreateMinimalNupkg();
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ReturnsAsync(new HttpResponseMessage
|
||||
{
|
||||
StatusCode = HttpStatusCode.OK,
|
||||
Content = new ByteArrayContent(nupkgContent)
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "TestPackage",
|
||||
Version = "2.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = false // Bypass cache
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.Success);
|
||||
Assert.False(result.FromCache);
|
||||
|
||||
// Verify HTTP call was made
|
||||
mockHandler.Protected().Verify(
|
||||
"SendAsync",
|
||||
Times.Once(),
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_UsesCorrectUrl()
|
||||
{
|
||||
// Arrange
|
||||
HttpRequestMessage? capturedRequest = null;
|
||||
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.Callback<HttpRequestMessage, CancellationToken>((req, _) => capturedRequest = req)
|
||||
.ReturnsAsync(new HttpResponseMessage
|
||||
{
|
||||
StatusCode = HttpStatusCode.NotFound
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "Newtonsoft.Json",
|
||||
Version = "13.0.3",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = false
|
||||
};
|
||||
|
||||
// Act
|
||||
await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(capturedRequest);
|
||||
Assert.Contains("newtonsoft.json", capturedRequest.RequestUri!.ToString());
|
||||
Assert.Contains("13.0.3", capturedRequest.RequestUri!.ToString());
|
||||
Assert.EndsWith(".nupkg", capturedRequest.RequestUri!.ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithCustomRegistry_UsesCustomUrl()
|
||||
{
|
||||
// Arrange
|
||||
HttpRequestMessage? capturedRequest = null;
|
||||
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.Callback<HttpRequestMessage, CancellationToken>((req, _) => capturedRequest = req)
|
||||
.ReturnsAsync(new HttpResponseMessage
|
||||
{
|
||||
StatusCode = HttpStatusCode.NotFound
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "TestPackage",
|
||||
Version = "1.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
RegistryUrl = "https://custom.nuget.feed.example.com/v3",
|
||||
UseCache = false
|
||||
};
|
||||
|
||||
// Act
|
||||
await downloader.DownloadAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(capturedRequest);
|
||||
Assert.StartsWith("https://custom.nuget.feed.example.com/v3", capturedRequest.RequestUri!.ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DownloadAsync_WithCancellation_HonorsCancellation()
|
||||
{
|
||||
// Arrange
|
||||
using var cts = new CancellationTokenSource();
|
||||
cts.Cancel();
|
||||
|
||||
var mockHandler = new Mock<HttpMessageHandler>();
|
||||
mockHandler
|
||||
.Protected()
|
||||
.Setup<Task<HttpResponseMessage>>(
|
||||
"SendAsync",
|
||||
ItExpr.IsAny<HttpRequestMessage>(),
|
||||
ItExpr.IsAny<CancellationToken>())
|
||||
.ThrowsAsync(new TaskCanceledException());
|
||||
|
||||
var httpClient = new HttpClient(mockHandler.Object);
|
||||
var downloader = CreateDownloader(httpClient);
|
||||
|
||||
var request = new PackageDownloadRequest
|
||||
{
|
||||
PackageName = "TestPackage",
|
||||
Version = "1.0.0",
|
||||
OutputDirectory = _testOutputDir,
|
||||
UseCache = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await downloader.DownloadAsync(request, cts.Token);
|
||||
|
||||
// Assert - should return failure, not throw
|
||||
Assert.False(result.Success);
|
||||
Assert.Contains("cancel", result.Error?.ToLower() ?? "");
|
||||
}
|
||||
|
||||
private NuGetPackageDownloader CreateDownloader(HttpClient? httpClient = null)
|
||||
{
|
||||
var client = httpClient ?? new HttpClient();
|
||||
var options = Options.Create(new NuGetDownloaderOptions());
|
||||
|
||||
return new NuGetPackageDownloader(
|
||||
client,
|
||||
NullLogger<NuGetPackageDownloader>.Instance,
|
||||
options);
|
||||
}
|
||||
|
||||
private static byte[] CreateMinimalNupkg()
|
||||
{
|
||||
// Create a minimal valid ZIP file (which is what a .nupkg is)
|
||||
using var ms = new MemoryStream();
|
||||
using (var archive = new System.IO.Compression.ZipArchive(ms, System.IO.Compression.ZipArchiveMode.Create, leaveOpen: true))
|
||||
{
|
||||
// Add a minimal .nuspec file
|
||||
var nuspecEntry = archive.CreateEntry("test.nuspec");
|
||||
using var writer = new StreamWriter(nuspecEntry.Open());
|
||||
writer.Write("""
|
||||
<?xml version="1.0"?>
|
||||
<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd">
|
||||
<metadata>
|
||||
<id>TestPackage</id>
|
||||
<version>1.0.0</version>
|
||||
<authors>Test</authors>
|
||||
<description>Test package</description>
|
||||
</metadata>
|
||||
</package>
|
||||
""");
|
||||
}
|
||||
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.VulnSurfaces.CallGraph;
|
||||
using StellaOps.Scanner.VulnSurfaces.Diagnostics;
|
||||
using StellaOps.Scanner.VulnSurfaces.Download;
|
||||
using StellaOps.Scanner.VulnSurfaces.Fingerprint;
|
||||
using StellaOps.Scanner.VulnSurfaces.Models;
|
||||
@@ -56,6 +57,12 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
var tags = new KeyValuePair<string, object?>[]
|
||||
{
|
||||
new("ecosystem", request.Ecosystem.ToLowerInvariant())
|
||||
};
|
||||
|
||||
VulnSurfaceMetrics.BuildRequests.Add(1, tags);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Building vulnerability surface for {CveId}: {Package} {VulnVersion} → {FixedVersion}",
|
||||
@@ -87,6 +94,8 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
Directory.CreateDirectory(workDir);
|
||||
|
||||
// 3. Download both versions
|
||||
VulnSurfaceMetrics.DownloadAttempts.Add(2, tags); // Two versions
|
||||
|
||||
var vulnDownload = await downloader.DownloadAsync(new PackageDownloadRequest
|
||||
{
|
||||
PackageName = request.PackageName,
|
||||
@@ -98,9 +107,14 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
if (!vulnDownload.Success)
|
||||
{
|
||||
sw.Stop();
|
||||
VulnSurfaceMetrics.DownloadFailures.Add(1, tags);
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, new KeyValuePair<string, object?>[] { new("ecosystem", request.Ecosystem.ToLowerInvariant()), new("reason", "download_vuln") });
|
||||
return VulnSurfaceBuildResult.Fail($"Failed to download vulnerable version: {vulnDownload.Error}", sw.Elapsed);
|
||||
}
|
||||
|
||||
VulnSurfaceMetrics.DownloadSuccesses.Add(1, tags);
|
||||
VulnSurfaceMetrics.DownloadDurationSeconds.Record(vulnDownload.Duration.TotalSeconds, tags);
|
||||
|
||||
var fixedDownload = await downloader.DownloadAsync(new PackageDownloadRequest
|
||||
{
|
||||
PackageName = request.PackageName,
|
||||
@@ -112,10 +126,16 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
if (!fixedDownload.Success)
|
||||
{
|
||||
sw.Stop();
|
||||
VulnSurfaceMetrics.DownloadFailures.Add(1, tags);
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, new KeyValuePair<string, object?>[] { new("ecosystem", request.Ecosystem.ToLowerInvariant()), new("reason", "download_fixed") });
|
||||
return VulnSurfaceBuildResult.Fail($"Failed to download fixed version: {fixedDownload.Error}", sw.Elapsed);
|
||||
}
|
||||
|
||||
VulnSurfaceMetrics.DownloadSuccesses.Add(1, tags);
|
||||
VulnSurfaceMetrics.DownloadDurationSeconds.Record(fixedDownload.Duration.TotalSeconds, tags);
|
||||
|
||||
// 4. Fingerprint both versions
|
||||
var fpSw = Stopwatch.StartNew();
|
||||
var vulnFingerprints = await fingerprinter.FingerprintAsync(new FingerprintRequest
|
||||
{
|
||||
PackagePath = vulnDownload.ExtractedPath!,
|
||||
@@ -126,9 +146,15 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
if (!vulnFingerprints.Success)
|
||||
{
|
||||
sw.Stop();
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, new KeyValuePair<string, object?>[] { new("ecosystem", request.Ecosystem.ToLowerInvariant()), new("reason", "fingerprint_vuln") });
|
||||
return VulnSurfaceBuildResult.Fail($"Failed to fingerprint vulnerable version: {vulnFingerprints.Error}", sw.Elapsed);
|
||||
}
|
||||
|
||||
VulnSurfaceMetrics.FingerprintDurationSeconds.Record(fpSw.Elapsed.TotalSeconds, tags);
|
||||
VulnSurfaceMetrics.MethodsFingerprinted.Add(vulnFingerprints.Methods.Count, tags);
|
||||
VulnSurfaceMetrics.MethodsPerPackage.Record(vulnFingerprints.Methods.Count, tags);
|
||||
|
||||
fpSw.Restart();
|
||||
var fixedFingerprints = await fingerprinter.FingerprintAsync(new FingerprintRequest
|
||||
{
|
||||
PackagePath = fixedDownload.ExtractedPath!,
|
||||
@@ -139,10 +165,16 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
if (!fixedFingerprints.Success)
|
||||
{
|
||||
sw.Stop();
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, new KeyValuePair<string, object?>[] { new("ecosystem", request.Ecosystem.ToLowerInvariant()), new("reason", "fingerprint_fixed") });
|
||||
return VulnSurfaceBuildResult.Fail($"Failed to fingerprint fixed version: {fixedFingerprints.Error}", sw.Elapsed);
|
||||
}
|
||||
|
||||
VulnSurfaceMetrics.FingerprintDurationSeconds.Record(fpSw.Elapsed.TotalSeconds, tags);
|
||||
VulnSurfaceMetrics.MethodsFingerprinted.Add(fixedFingerprints.Methods.Count, tags);
|
||||
VulnSurfaceMetrics.MethodsPerPackage.Record(fixedFingerprints.Methods.Count, tags);
|
||||
|
||||
// 5. Compute diff
|
||||
var diffSw = Stopwatch.StartNew();
|
||||
var diff = await _diffEngine.DiffAsync(new MethodDiffRequest
|
||||
{
|
||||
VulnFingerprints = vulnFingerprints,
|
||||
@@ -152,9 +184,12 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
if (!diff.Success)
|
||||
{
|
||||
sw.Stop();
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, new KeyValuePair<string, object?>[] { new("ecosystem", request.Ecosystem.ToLowerInvariant()), new("reason", "diff") });
|
||||
return VulnSurfaceBuildResult.Fail($"Failed to compute diff: {diff.Error}", sw.Elapsed);
|
||||
}
|
||||
|
||||
VulnSurfaceMetrics.DiffDurationSeconds.Record(diffSw.Elapsed.TotalSeconds, tags);
|
||||
|
||||
// 6. Build sinks from diff
|
||||
var sinks = BuildSinks(diff);
|
||||
|
||||
@@ -209,6 +244,13 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
|
||||
sw.Stop();
|
||||
|
||||
// Record success metrics
|
||||
VulnSurfaceMetrics.BuildSuccesses.Add(1, tags);
|
||||
VulnSurfaceMetrics.BuildDurationSeconds.Record(sw.Elapsed.TotalSeconds, tags);
|
||||
VulnSurfaceMetrics.SinksPerSurface.Record(sinks.Count, tags);
|
||||
VulnSurfaceMetrics.SinksIdentified.Add(sinks.Count, tags);
|
||||
VulnSurfaceMetrics.IncrementEcosystemCount(request.Ecosystem);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Built vulnerability surface for {CveId}: {SinkCount} sinks, {TriggerCount} triggers in {Duration}ms",
|
||||
request.CveId, sinks.Count, triggerCount, sw.ElapsedMilliseconds);
|
||||
@@ -218,6 +260,16 @@ public sealed class VulnSurfaceBuilder : IVulnSurfaceBuilder
|
||||
catch (Exception ex)
|
||||
{
|
||||
sw.Stop();
|
||||
|
||||
// Record failure metrics
|
||||
var failTags = new KeyValuePair<string, object?>[]
|
||||
{
|
||||
new("ecosystem", request.Ecosystem.ToLowerInvariant()),
|
||||
new("reason", "exception")
|
||||
};
|
||||
VulnSurfaceMetrics.BuildFailures.Add(1, failTags);
|
||||
VulnSurfaceMetrics.BuildDurationSeconds.Record(sw.Elapsed.TotalSeconds, tags);
|
||||
|
||||
_logger.LogError(ex, "Failed to build vulnerability surface for {CveId}", request.CveId);
|
||||
return VulnSurfaceBuildResult.Fail(ex.Message, sw.Elapsed);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,233 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VulnSurfaceMetrics.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Task: SURF-019
|
||||
// Description: Metrics for vulnerability surface computation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Diagnostics;
|
||||
|
||||
/// <summary>
|
||||
/// Metrics for vulnerability surface computation and caching.
|
||||
/// </summary>
|
||||
public static class VulnSurfaceMetrics
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Scanner.VulnSurfaces", "1.0.0");
|
||||
|
||||
// ===== BUILD COUNTERS =====
|
||||
|
||||
/// <summary>
|
||||
/// Total surface build requests by ecosystem.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> BuildRequests = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_build_requests_total",
|
||||
description: "Total vulnerability surface build requests");
|
||||
|
||||
/// <summary>
|
||||
/// Successful surface builds by ecosystem.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> BuildSuccesses = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_build_successes_total",
|
||||
description: "Total successful vulnerability surface builds");
|
||||
|
||||
/// <summary>
|
||||
/// Failed surface builds by ecosystem and reason.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> BuildFailures = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_build_failures_total",
|
||||
description: "Total failed vulnerability surface builds");
|
||||
|
||||
/// <summary>
|
||||
/// Cache hits when surface already computed.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> CacheHits = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_cache_hits_total",
|
||||
description: "Total cache hits for pre-computed surfaces");
|
||||
|
||||
// ===== DOWNLOAD COUNTERS =====
|
||||
|
||||
/// <summary>
|
||||
/// Package downloads attempted by ecosystem.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> DownloadAttempts = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_downloads_attempted_total",
|
||||
description: "Total package download attempts");
|
||||
|
||||
/// <summary>
|
||||
/// Successful package downloads.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> DownloadSuccesses = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_downloads_succeeded_total",
|
||||
description: "Total successful package downloads");
|
||||
|
||||
/// <summary>
|
||||
/// Failed package downloads.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> DownloadFailures = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_downloads_failed_total",
|
||||
description: "Total failed package downloads");
|
||||
|
||||
// ===== FINGERPRINT COUNTERS =====
|
||||
|
||||
/// <summary>
|
||||
/// Methods fingerprinted by ecosystem.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> MethodsFingerprinted = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_methods_fingerprinted_total",
|
||||
description: "Total methods fingerprinted");
|
||||
|
||||
/// <summary>
|
||||
/// Methods changed (sinks) identified.
|
||||
/// </summary>
|
||||
public static readonly Counter<long> SinksIdentified = Meter.CreateCounter<long>(
|
||||
"stellaops_vulnsurface_sinks_identified_total",
|
||||
description: "Total sink methods (changed methods) identified");
|
||||
|
||||
// ===== TIMING HISTOGRAMS =====
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end surface build duration.
|
||||
/// </summary>
|
||||
public static readonly Histogram<double> BuildDurationSeconds = Meter.CreateHistogram<double>(
|
||||
"stellaops_vulnsurface_build_duration_seconds",
|
||||
unit: "s",
|
||||
description: "Duration of surface build operations",
|
||||
advice: new InstrumentAdvice<double>
|
||||
{
|
||||
HistogramBucketBoundaries = [0.1, 0.5, 1.0, 2.5, 5.0, 10.0, 30.0, 60.0, 120.0]
|
||||
});
|
||||
|
||||
/// <summary>
|
||||
/// Package download duration.
|
||||
/// </summary>
|
||||
public static readonly Histogram<double> DownloadDurationSeconds = Meter.CreateHistogram<double>(
|
||||
"stellaops_vulnsurface_download_duration_seconds",
|
||||
unit: "s",
|
||||
description: "Duration of package download operations",
|
||||
advice: new InstrumentAdvice<double>
|
||||
{
|
||||
HistogramBucketBoundaries = [0.1, 0.5, 1.0, 2.5, 5.0, 10.0, 30.0]
|
||||
});
|
||||
|
||||
/// <summary>
|
||||
/// Fingerprinting duration per package.
|
||||
/// </summary>
|
||||
public static readonly Histogram<double> FingerprintDurationSeconds = Meter.CreateHistogram<double>(
|
||||
"stellaops_vulnsurface_fingerprint_duration_seconds",
|
||||
unit: "s",
|
||||
description: "Duration of fingerprinting operations",
|
||||
advice: new InstrumentAdvice<double>
|
||||
{
|
||||
HistogramBucketBoundaries = [0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0]
|
||||
});
|
||||
|
||||
/// <summary>
|
||||
/// Diff computation duration.
|
||||
/// </summary>
|
||||
public static readonly Histogram<double> DiffDurationSeconds = Meter.CreateHistogram<double>(
|
||||
"stellaops_vulnsurface_diff_duration_seconds",
|
||||
unit: "s",
|
||||
description: "Duration of diff computation",
|
||||
advice: new InstrumentAdvice<double>
|
||||
{
|
||||
HistogramBucketBoundaries = [0.001, 0.01, 0.05, 0.1, 0.25, 0.5, 1.0]
|
||||
});
|
||||
|
||||
// ===== SIZE HISTOGRAMS =====
|
||||
|
||||
/// <summary>
|
||||
/// Number of methods per package version.
|
||||
/// </summary>
|
||||
public static readonly Histogram<int> MethodsPerPackage = Meter.CreateHistogram<int>(
|
||||
"stellaops_vulnsurface_methods_per_package",
|
||||
description: "Number of methods per analyzed package version",
|
||||
advice: new InstrumentAdvice<int>
|
||||
{
|
||||
HistogramBucketBoundaries = [10, 50, 100, 250, 500, 1000, 2500, 5000, 10000]
|
||||
});
|
||||
|
||||
/// <summary>
|
||||
/// Number of sinks per surface.
|
||||
/// </summary>
|
||||
public static readonly Histogram<int> SinksPerSurface = Meter.CreateHistogram<int>(
|
||||
"stellaops_vulnsurface_sinks_per_surface",
|
||||
description: "Number of sink methods per vulnerability surface",
|
||||
advice: new InstrumentAdvice<int>
|
||||
{
|
||||
HistogramBucketBoundaries = [1, 2, 5, 10, 25, 50, 100, 250]
|
||||
});
|
||||
|
||||
// ===== ECOSYSTEM DISTRIBUTION =====
|
||||
|
||||
private static int _nugetSurfaces;
|
||||
private static int _npmSurfaces;
|
||||
private static int _mavenSurfaces;
|
||||
private static int _pypiSurfaces;
|
||||
|
||||
/// <summary>
|
||||
/// Current count of NuGet surfaces.
|
||||
/// </summary>
|
||||
public static readonly ObservableGauge<int> NuGetSurfaceCount = Meter.CreateObservableGauge(
|
||||
"stellaops_vulnsurface_nuget_count",
|
||||
() => _nugetSurfaces,
|
||||
description: "Current count of NuGet vulnerability surfaces");
|
||||
|
||||
/// <summary>
|
||||
/// Current count of npm surfaces.
|
||||
/// </summary>
|
||||
public static readonly ObservableGauge<int> NpmSurfaceCount = Meter.CreateObservableGauge(
|
||||
"stellaops_vulnsurface_npm_count",
|
||||
() => _npmSurfaces,
|
||||
description: "Current count of npm vulnerability surfaces");
|
||||
|
||||
/// <summary>
|
||||
/// Current count of Maven surfaces.
|
||||
/// </summary>
|
||||
public static readonly ObservableGauge<int> MavenSurfaceCount = Meter.CreateObservableGauge(
|
||||
"stellaops_vulnsurface_maven_count",
|
||||
() => _mavenSurfaces,
|
||||
description: "Current count of Maven vulnerability surfaces");
|
||||
|
||||
/// <summary>
|
||||
/// Current count of PyPI surfaces.
|
||||
/// </summary>
|
||||
public static readonly ObservableGauge<int> PyPISurfaceCount = Meter.CreateObservableGauge(
|
||||
"stellaops_vulnsurface_pypi_count",
|
||||
() => _pypiSurfaces,
|
||||
description: "Current count of PyPI vulnerability surfaces");
|
||||
|
||||
/// <summary>
|
||||
/// Updates the ecosystem surface counts.
|
||||
/// </summary>
|
||||
public static void SetEcosystemCounts(int nuget, int npm, int maven, int pypi)
|
||||
{
|
||||
Interlocked.Exchange(ref _nugetSurfaces, nuget);
|
||||
Interlocked.Exchange(ref _npmSurfaces, npm);
|
||||
Interlocked.Exchange(ref _mavenSurfaces, maven);
|
||||
Interlocked.Exchange(ref _pypiSurfaces, pypi);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Increments the surface count for an ecosystem.
|
||||
/// </summary>
|
||||
public static void IncrementEcosystemCount(string ecosystem)
|
||||
{
|
||||
switch (ecosystem.ToLowerInvariant())
|
||||
{
|
||||
case "nuget":
|
||||
Interlocked.Increment(ref _nugetSurfaces);
|
||||
break;
|
||||
case "npm":
|
||||
Interlocked.Increment(ref _npmSurfaces);
|
||||
break;
|
||||
case "maven":
|
||||
Interlocked.Increment(ref _mavenSurfaces);
|
||||
break;
|
||||
case "pypi":
|
||||
Interlocked.Increment(ref _pypiSurfaces);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -124,6 +124,12 @@ public sealed record VulnSurfaceSink
|
||||
[JsonPropertyName("method_name")]
|
||||
public required string MethodName { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Namespace/package.
|
||||
/// </summary>
|
||||
[JsonPropertyName("namespace")]
|
||||
public string? Namespace { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Method signature.
|
||||
/// </summary>
|
||||
@@ -153,6 +159,42 @@ public sealed record VulnSurfaceSink
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_direct_exploit")]
|
||||
public bool IsDirectExploit { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the method is public.
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_public")]
|
||||
public bool IsPublic { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of parameters.
|
||||
/// </summary>
|
||||
[JsonPropertyName("parameter_count")]
|
||||
public int? ParameterCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Return type.
|
||||
/// </summary>
|
||||
[JsonPropertyName("return_type")]
|
||||
public string? ReturnType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Source file path (if available from debug symbols).
|
||||
/// </summary>
|
||||
[JsonPropertyName("source_file")]
|
||||
public string? SourceFile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Start line number.
|
||||
/// </summary>
|
||||
[JsonPropertyName("start_line")]
|
||||
public int? StartLine { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// End line number.
|
||||
/// </summary>
|
||||
[JsonPropertyName("end_line")]
|
||||
public int? EndLine { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||
<PackageReference Include="Mono.Cecil" Version="0.11.6" />
|
||||
<PackageReference Include="Npgsql" Version="9.0.3" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,99 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IVulnSurfaceRepository.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Task: SURF-016
|
||||
// Description: Repository interface for vulnerability surfaces.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.VulnSurfaces.Models;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Storage;
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for vulnerability surface storage.
|
||||
/// </summary>
|
||||
public interface IVulnSurfaceRepository
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a new vulnerability surface.
|
||||
/// </summary>
|
||||
Task<Guid> CreateSurfaceAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
string? fixedVersion,
|
||||
string fingerprintMethod,
|
||||
int totalMethodsVuln,
|
||||
int totalMethodsFixed,
|
||||
int changedMethodCount,
|
||||
int? computationDurationMs,
|
||||
string? attestationDigest,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a sink method to a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<Guid> AddSinkAsync(
|
||||
Guid surfaceId,
|
||||
string methodKey,
|
||||
string methodName,
|
||||
string declaringType,
|
||||
string changeType,
|
||||
string? vulnHash,
|
||||
string? fixedHash,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a trigger to a surface.
|
||||
/// </summary>
|
||||
Task<Guid> AddTriggerAsync(
|
||||
Guid surfaceId,
|
||||
string triggerMethodKey,
|
||||
string sinkMethodKey,
|
||||
int depth,
|
||||
double confidence,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a vulnerability surface by CVE and package.
|
||||
/// </summary>
|
||||
Task<VulnSurface?> GetByCveAndPackageAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets sinks for a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurfaceSink>> GetSinksAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets triggers for a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurfaceTrigger>> GetTriggersAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets all surfaces for a CVE.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurface>> GetSurfacesByCveAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Deletes a vulnerability surface and all related data.
|
||||
/// </summary>
|
||||
Task<bool> DeleteSurfaceAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,100 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IVulnSurfaceRepository.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Task: SURF-016
|
||||
// Description: Repository interface for vulnerability surfaces.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Scanner.VulnSurfaces.Models;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Storage;
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface for vulnerability surface storage.
|
||||
/// </summary>
|
||||
public interface IVulnSurfaceRepository
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a new vulnerability surface.
|
||||
/// </summary>
|
||||
Task<Guid> CreateSurfaceAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
string? fixedVersion,
|
||||
string fingerprintMethod,
|
||||
int totalMethodsVuln,
|
||||
int totalMethodsFixed,
|
||||
int changedMethodCount,
|
||||
int? computationDurationMs,
|
||||
string? attestationDigest,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a sink method to a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<Guid> AddSinkAsync(
|
||||
Guid surfaceId,
|
||||
string methodKey,
|
||||
string methodName,
|
||||
string declaringType,
|
||||
string changeType,
|
||||
string? vulnHash,
|
||||
string? fixedHash,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a trigger to a surface.
|
||||
/// </summary>
|
||||
Task<Guid> AddTriggerAsync(
|
||||
Guid surfaceId,
|
||||
string triggerMethodKey,
|
||||
string sinkMethodKey,
|
||||
int depth,
|
||||
double confidence,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a vulnerability surface by CVE and package.
|
||||
/// </summary>
|
||||
Task<VulnSurface?> GetByCveAndPackageAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets sinks for a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurfaceSink>> GetSinksAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets triggers for a vulnerability surface.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurfaceTrigger>> GetTriggersAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Gets all surfaces for a CVE.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<VulnSurface>> GetSurfacesByCveAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Deletes a vulnerability surface and all related data.
|
||||
/// </summary>
|
||||
Task<bool> DeleteSurfaceAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,400 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PostgresVulnSurfaceRepository.cs
|
||||
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
|
||||
// Task: SURF-016
|
||||
// Description: PostgreSQL implementation of vulnerability surface repository.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using StellaOps.Scanner.VulnSurfaces.Models;
|
||||
|
||||
namespace StellaOps.Scanner.VulnSurfaces.Storage;
|
||||
|
||||
/// <summary>
|
||||
/// PostgreSQL implementation of vulnerability surface repository.
|
||||
/// </summary>
|
||||
public sealed class PostgresVulnSurfaceRepository : IVulnSurfaceRepository
|
||||
{
|
||||
private readonly NpgsqlDataSource _dataSource;
|
||||
private readonly ILogger<PostgresVulnSurfaceRepository> _logger;
|
||||
private readonly int _commandTimeoutSeconds;
|
||||
|
||||
public PostgresVulnSurfaceRepository(
|
||||
NpgsqlDataSource dataSource,
|
||||
ILogger<PostgresVulnSurfaceRepository> logger,
|
||||
int commandTimeoutSeconds = 30)
|
||||
{
|
||||
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_commandTimeoutSeconds = commandTimeoutSeconds;
|
||||
}
|
||||
|
||||
public async Task<Guid> CreateSurfaceAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
string? fixedVersion,
|
||||
string fingerprintMethod,
|
||||
int totalMethodsVuln,
|
||||
int totalMethodsFixed,
|
||||
int changedMethodCount,
|
||||
int? computationDurationMs,
|
||||
string? attestationDigest,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var id = Guid.NewGuid();
|
||||
|
||||
const string sql = """
|
||||
INSERT INTO scanner.vuln_surfaces (
|
||||
id, tenant_id, cve_id, package_ecosystem, package_name,
|
||||
vuln_version, fixed_version, fingerprint_method,
|
||||
total_methods_vuln, total_methods_fixed, changed_method_count,
|
||||
computation_duration_ms, attestation_digest
|
||||
) VALUES (
|
||||
@id, @tenant_id, @cve_id, @ecosystem, @package_name,
|
||||
@vuln_version, @fixed_version, @fingerprint_method,
|
||||
@total_methods_vuln, @total_methods_fixed, @changed_method_count,
|
||||
@computation_duration_ms, @attestation_digest
|
||||
)
|
||||
ON CONFLICT (tenant_id, cve_id, package_ecosystem, package_name, vuln_version)
|
||||
DO UPDATE SET
|
||||
fixed_version = EXCLUDED.fixed_version,
|
||||
fingerprint_method = EXCLUDED.fingerprint_method,
|
||||
total_methods_vuln = EXCLUDED.total_methods_vuln,
|
||||
total_methods_fixed = EXCLUDED.total_methods_fixed,
|
||||
changed_method_count = EXCLUDED.changed_method_count,
|
||||
computation_duration_ms = EXCLUDED.computation_duration_ms,
|
||||
attestation_digest = EXCLUDED.attestation_digest,
|
||||
computed_at = now()
|
||||
RETURNING id
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await SetTenantContextAsync(connection, tenantId, cancellationToken);
|
||||
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("id", id);
|
||||
command.Parameters.AddWithValue("tenant_id", tenantId);
|
||||
command.Parameters.AddWithValue("cve_id", cveId);
|
||||
command.Parameters.AddWithValue("ecosystem", ecosystem);
|
||||
command.Parameters.AddWithValue("package_name", packageName);
|
||||
command.Parameters.AddWithValue("vuln_version", vulnVersion);
|
||||
command.Parameters.AddWithValue("fixed_version", (object?)fixedVersion ?? DBNull.Value);
|
||||
command.Parameters.AddWithValue("fingerprint_method", fingerprintMethod);
|
||||
command.Parameters.AddWithValue("total_methods_vuln", totalMethodsVuln);
|
||||
command.Parameters.AddWithValue("total_methods_fixed", totalMethodsFixed);
|
||||
command.Parameters.AddWithValue("changed_method_count", changedMethodCount);
|
||||
command.Parameters.AddWithValue("computation_duration_ms", (object?)computationDurationMs ?? DBNull.Value);
|
||||
command.Parameters.AddWithValue("attestation_digest", (object?)attestationDigest ?? DBNull.Value);
|
||||
|
||||
var result = await command.ExecuteScalarAsync(cancellationToken);
|
||||
return (Guid)result!;
|
||||
}
|
||||
|
||||
public async Task<Guid> AddSinkAsync(
|
||||
Guid surfaceId,
|
||||
string methodKey,
|
||||
string methodName,
|
||||
string declaringType,
|
||||
string changeType,
|
||||
string? vulnHash,
|
||||
string? fixedHash,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var id = Guid.NewGuid();
|
||||
|
||||
const string sql = """
|
||||
INSERT INTO scanner.vuln_surface_sinks (
|
||||
id, surface_id, method_key, method_name, declaring_type,
|
||||
change_type, vuln_fingerprint, fixed_fingerprint
|
||||
) VALUES (
|
||||
@id, @surface_id, @method_key, @method_name, @declaring_type,
|
||||
@change_type, @vuln_hash, @fixed_hash
|
||||
)
|
||||
ON CONFLICT (surface_id, method_key) DO UPDATE SET
|
||||
change_type = EXCLUDED.change_type,
|
||||
vuln_fingerprint = EXCLUDED.vuln_fingerprint,
|
||||
fixed_fingerprint = EXCLUDED.fixed_fingerprint
|
||||
RETURNING id
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("id", id);
|
||||
command.Parameters.AddWithValue("surface_id", surfaceId);
|
||||
command.Parameters.AddWithValue("method_key", methodKey);
|
||||
command.Parameters.AddWithValue("method_name", methodName);
|
||||
command.Parameters.AddWithValue("declaring_type", declaringType);
|
||||
command.Parameters.AddWithValue("change_type", changeType);
|
||||
command.Parameters.AddWithValue("vuln_hash", (object?)vulnHash ?? DBNull.Value);
|
||||
command.Parameters.AddWithValue("fixed_hash", (object?)fixedHash ?? DBNull.Value);
|
||||
|
||||
var result = await command.ExecuteScalarAsync(cancellationToken);
|
||||
return (Guid)result!;
|
||||
}
|
||||
|
||||
public async Task<Guid> AddTriggerAsync(
|
||||
Guid surfaceId,
|
||||
string triggerMethodKey,
|
||||
string sinkMethodKey,
|
||||
int depth,
|
||||
double confidence,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var id = Guid.NewGuid();
|
||||
|
||||
const string sql = """
|
||||
INSERT INTO scanner.vuln_surface_triggers (
|
||||
id, sink_id, scan_id, caller_node_id, caller_method_key,
|
||||
reachability_bucket, path_length, confidence, call_type, is_conditional
|
||||
) VALUES (
|
||||
@id,
|
||||
(SELECT id FROM scanner.vuln_surface_sinks WHERE surface_id = @surface_id AND method_key = @sink_method_key LIMIT 1),
|
||||
@surface_id::uuid,
|
||||
@trigger_method_key,
|
||||
@trigger_method_key,
|
||||
'direct',
|
||||
@depth,
|
||||
@confidence,
|
||||
'direct',
|
||||
false
|
||||
)
|
||||
RETURNING id
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("id", id);
|
||||
command.Parameters.AddWithValue("surface_id", surfaceId);
|
||||
command.Parameters.AddWithValue("trigger_method_key", triggerMethodKey);
|
||||
command.Parameters.AddWithValue("sink_method_key", sinkMethodKey);
|
||||
command.Parameters.AddWithValue("depth", depth);
|
||||
command.Parameters.AddWithValue("confidence", (float)confidence);
|
||||
|
||||
var result = await command.ExecuteScalarAsync(cancellationToken);
|
||||
return result is Guid g ? g : Guid.Empty;
|
||||
}
|
||||
|
||||
public async Task<VulnSurface?> GetByCveAndPackageAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
string ecosystem,
|
||||
string packageName,
|
||||
string vulnVersion,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, cve_id, package_ecosystem, package_name,
|
||||
vuln_version, fixed_version, fingerprint_method,
|
||||
total_methods_vuln, total_methods_fixed, changed_method_count,
|
||||
computation_duration_ms, attestation_digest, computed_at
|
||||
FROM scanner.vuln_surfaces
|
||||
WHERE tenant_id = @tenant_id
|
||||
AND cve_id = @cve_id
|
||||
AND package_ecosystem = @ecosystem
|
||||
AND package_name = @package_name
|
||||
AND vuln_version = @vuln_version
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await SetTenantContextAsync(connection, tenantId, cancellationToken);
|
||||
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("tenant_id", tenantId);
|
||||
command.Parameters.AddWithValue("cve_id", cveId);
|
||||
command.Parameters.AddWithValue("ecosystem", ecosystem);
|
||||
command.Parameters.AddWithValue("package_name", packageName);
|
||||
command.Parameters.AddWithValue("vuln_version", vulnVersion);
|
||||
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken);
|
||||
if (!await reader.ReadAsync(cancellationToken))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return MapToVulnSurface(reader);
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<VulnSurfaceSink>> GetSinksAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, surface_id, method_key, method_name, declaring_type,
|
||||
change_type, vuln_fingerprint, fixed_fingerprint
|
||||
FROM scanner.vuln_surface_sinks
|
||||
WHERE surface_id = @surface_id
|
||||
ORDER BY declaring_type, method_name
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("surface_id", surfaceId);
|
||||
|
||||
var sinks = new List<VulnSurfaceSink>();
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken);
|
||||
while (await reader.ReadAsync(cancellationToken))
|
||||
{
|
||||
sinks.Add(MapToSink(reader));
|
||||
}
|
||||
|
||||
return sinks;
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<VulnSurfaceTrigger>> GetTriggersAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT vst.id, vss.surface_id, vst.caller_method_key, vss.method_key,
|
||||
vst.path_length, vst.confidence
|
||||
FROM scanner.vuln_surface_triggers vst
|
||||
JOIN scanner.vuln_surface_sinks vss ON vst.sink_id = vss.id
|
||||
WHERE vss.surface_id = @surface_id
|
||||
ORDER BY vst.path_length
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("surface_id", surfaceId);
|
||||
|
||||
var triggers = new List<VulnSurfaceTrigger>();
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken);
|
||||
while (await reader.ReadAsync(cancellationToken))
|
||||
{
|
||||
triggers.Add(MapToTrigger(reader));
|
||||
}
|
||||
|
||||
return triggers;
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<VulnSurface>> GetSurfacesByCveAsync(
|
||||
Guid tenantId,
|
||||
string cveId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, tenant_id, cve_id, package_ecosystem, package_name,
|
||||
vuln_version, fixed_version, fingerprint_method,
|
||||
total_methods_vuln, total_methods_fixed, changed_method_count,
|
||||
computation_duration_ms, attestation_digest, computed_at
|
||||
FROM scanner.vuln_surfaces
|
||||
WHERE tenant_id = @tenant_id AND cve_id = @cve_id
|
||||
ORDER BY package_ecosystem, package_name, vuln_version
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await SetTenantContextAsync(connection, tenantId, cancellationToken);
|
||||
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("tenant_id", tenantId);
|
||||
command.Parameters.AddWithValue("cve_id", cveId);
|
||||
|
||||
var surfaces = new List<VulnSurface>();
|
||||
await using var reader = await command.ExecuteReaderAsync(cancellationToken);
|
||||
while (await reader.ReadAsync(cancellationToken))
|
||||
{
|
||||
surfaces.Add(MapToVulnSurface(reader));
|
||||
}
|
||||
|
||||
return surfaces;
|
||||
}
|
||||
|
||||
public async Task<bool> DeleteSurfaceAsync(
|
||||
Guid surfaceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
const string sql = """
|
||||
DELETE FROM scanner.vuln_surfaces WHERE id = @id
|
||||
""";
|
||||
|
||||
await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken);
|
||||
await using var command = new NpgsqlCommand(sql, connection);
|
||||
command.CommandTimeout = _commandTimeoutSeconds;
|
||||
|
||||
command.Parameters.AddWithValue("id", surfaceId);
|
||||
|
||||
var rows = await command.ExecuteNonQueryAsync(cancellationToken);
|
||||
return rows > 0;
|
||||
}
|
||||
|
||||
private static async Task SetTenantContextAsync(
|
||||
NpgsqlConnection connection,
|
||||
Guid tenantId,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
await using var command = new NpgsqlCommand(
|
||||
$"SET LOCAL app.tenant_id = '{tenantId}'",
|
||||
connection);
|
||||
await command.ExecuteNonQueryAsync(cancellationToken);
|
||||
}
|
||||
|
||||
private static VulnSurface MapToVulnSurface(NpgsqlDataReader reader)
|
||||
{
|
||||
return new VulnSurface
|
||||
{
|
||||
SurfaceId = reader.GetGuid(0).GetHashCode(),
|
||||
CveId = reader.GetString(2),
|
||||
PackageId = $"pkg:{reader.GetString(3)}/{reader.GetString(4)}@{reader.GetString(5)}",
|
||||
Ecosystem = reader.GetString(3),
|
||||
VulnVersion = reader.GetString(5),
|
||||
FixedVersion = reader.IsDBNull(6) ? string.Empty : reader.GetString(6),
|
||||
Status = VulnSurfaceStatus.Computed,
|
||||
Confidence = 1.0,
|
||||
ComputedAt = reader.GetDateTime(13)
|
||||
};
|
||||
}
|
||||
|
||||
private static VulnSurfaceSink MapToSink(NpgsqlDataReader reader)
|
||||
{
|
||||
return new VulnSurfaceSink
|
||||
{
|
||||
SinkId = reader.GetGuid(0).GetHashCode(),
|
||||
SurfaceId = reader.GetGuid(1).GetHashCode(),
|
||||
MethodKey = reader.GetString(2),
|
||||
MethodName = reader.GetString(3),
|
||||
DeclaringType = reader.GetString(4),
|
||||
ChangeType = ParseChangeType(reader.GetString(5)),
|
||||
VulnHash = reader.IsDBNull(6) ? null : reader.GetString(6),
|
||||
FixedHash = reader.IsDBNull(7) ? null : reader.GetString(7)
|
||||
};
|
||||
}
|
||||
|
||||
private static VulnSurfaceTrigger MapToTrigger(NpgsqlDataReader reader)
|
||||
{
|
||||
return new VulnSurfaceTrigger
|
||||
{
|
||||
SurfaceId = reader.GetGuid(1).GetHashCode(),
|
||||
TriggerMethodKey = reader.GetString(2),
|
||||
SinkMethodKey = reader.GetString(3),
|
||||
Depth = reader.IsDBNull(4) ? 0 : reader.GetInt32(4),
|
||||
Confidence = reader.IsDBNull(5) ? 1.0 : reader.GetFloat(5)
|
||||
};
|
||||
}
|
||||
|
||||
private static MethodChangeType ParseChangeType(string changeType) => changeType switch
|
||||
{
|
||||
"added" => MethodChangeType.Added,
|
||||
"removed" => MethodChangeType.Removed,
|
||||
"modified" => MethodChangeType.Modified,
|
||||
"signaturechanged" => MethodChangeType.SignatureChanged,
|
||||
_ => MethodChangeType.Modified
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,304 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AttestingRichGraphWriterTests.cs
|
||||
// Sprint: SPRINT_3620_0001_0001_reachability_witness_dsse
|
||||
// Description: Tests for AttestingRichGraphWriter integration.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Scanner.Reachability.Attestation;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Tests;
|
||||
|
||||
public class AttestingRichGraphWriterTests : IAsyncLifetime
|
||||
{
|
||||
private DirectoryInfo _tempDir = null!;
|
||||
|
||||
public Task InitializeAsync()
|
||||
{
|
||||
_tempDir = Directory.CreateTempSubdirectory("attesting-writer-test-");
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
{
|
||||
try
|
||||
{
|
||||
if (_tempDir.Exists)
|
||||
{
|
||||
_tempDir.Delete(recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task WriteWithAttestationAsync_WhenEnabled_ProducesAttestationFile()
|
||||
{
|
||||
// Arrange
|
||||
var cryptoHash = new TestCryptoHash();
|
||||
var graphWriter = new RichGraphWriter(cryptoHash);
|
||||
var witnessOptions = Options.Create(new ReachabilityWitnessOptions
|
||||
{
|
||||
Enabled = true,
|
||||
StoreInCas = false,
|
||||
PublishToRekor = false
|
||||
});
|
||||
var witnessPublisher = new ReachabilityWitnessPublisher(
|
||||
witnessOptions,
|
||||
cryptoHash,
|
||||
NullLogger<ReachabilityWitnessPublisher>.Instance);
|
||||
|
||||
var writer = new AttestingRichGraphWriter(
|
||||
graphWriter,
|
||||
witnessPublisher,
|
||||
witnessOptions,
|
||||
NullLogger<AttestingRichGraphWriter>.Instance);
|
||||
|
||||
var graph = CreateTestGraph();
|
||||
|
||||
// Act
|
||||
var result = await writer.WriteWithAttestationAsync(
|
||||
graph,
|
||||
_tempDir.FullName,
|
||||
"test-analysis",
|
||||
"sha256:abc123");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.True(File.Exists(result.GraphPath));
|
||||
Assert.True(File.Exists(result.MetaPath));
|
||||
Assert.NotNull(result.AttestationPath);
|
||||
Assert.True(File.Exists(result.AttestationPath));
|
||||
Assert.NotNull(result.WitnessResult);
|
||||
Assert.NotEmpty(result.WitnessResult.StatementHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task WriteWithAttestationAsync_WhenDisabled_NoAttestationFile()
|
||||
{
|
||||
// Arrange
|
||||
var cryptoHash = new TestCryptoHash();
|
||||
var graphWriter = new RichGraphWriter(cryptoHash);
|
||||
var witnessOptions = Options.Create(new ReachabilityWitnessOptions
|
||||
{
|
||||
Enabled = false
|
||||
});
|
||||
var witnessPublisher = new ReachabilityWitnessPublisher(
|
||||
witnessOptions,
|
||||
cryptoHash,
|
||||
NullLogger<ReachabilityWitnessPublisher>.Instance);
|
||||
|
||||
var writer = new AttestingRichGraphWriter(
|
||||
graphWriter,
|
||||
witnessPublisher,
|
||||
witnessOptions,
|
||||
NullLogger<AttestingRichGraphWriter>.Instance);
|
||||
|
||||
var graph = CreateTestGraph();
|
||||
|
||||
// Act
|
||||
var result = await writer.WriteWithAttestationAsync(
|
||||
graph,
|
||||
_tempDir.FullName,
|
||||
"test-analysis",
|
||||
"sha256:abc123");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.True(File.Exists(result.GraphPath));
|
||||
Assert.True(File.Exists(result.MetaPath));
|
||||
Assert.Null(result.AttestationPath);
|
||||
Assert.Null(result.WitnessResult);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task WriteWithAttestationAsync_AttestationContainsValidDsse()
|
||||
{
|
||||
// Arrange
|
||||
var cryptoHash = new TestCryptoHash();
|
||||
var graphWriter = new RichGraphWriter(cryptoHash);
|
||||
var witnessOptions = Options.Create(new ReachabilityWitnessOptions
|
||||
{
|
||||
Enabled = true,
|
||||
StoreInCas = false,
|
||||
PublishToRekor = false
|
||||
});
|
||||
var witnessPublisher = new ReachabilityWitnessPublisher(
|
||||
witnessOptions,
|
||||
cryptoHash,
|
||||
NullLogger<ReachabilityWitnessPublisher>.Instance);
|
||||
|
||||
var writer = new AttestingRichGraphWriter(
|
||||
graphWriter,
|
||||
witnessPublisher,
|
||||
witnessOptions,
|
||||
NullLogger<AttestingRichGraphWriter>.Instance);
|
||||
|
||||
var graph = CreateTestGraph();
|
||||
|
||||
// Act
|
||||
var result = await writer.WriteWithAttestationAsync(
|
||||
graph,
|
||||
_tempDir.FullName,
|
||||
"test-analysis",
|
||||
"sha256:abc123");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result.AttestationPath);
|
||||
var dsseJson = await File.ReadAllTextAsync(result.AttestationPath);
|
||||
Assert.Contains("payloadType", dsseJson);
|
||||
// Note: + may be encoded as \u002B in JSON
|
||||
Assert.True(dsseJson.Contains("application/vnd.in-toto+json") || dsseJson.Contains("application/vnd.in-toto\\u002Bjson"));
|
||||
Assert.Contains("payload", dsseJson);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task WriteWithAttestationAsync_GraphHashIsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var cryptoHash = new TestCryptoHash();
|
||||
var graphWriter = new RichGraphWriter(cryptoHash);
|
||||
var witnessOptions = Options.Create(new ReachabilityWitnessOptions
|
||||
{
|
||||
Enabled = true,
|
||||
StoreInCas = false,
|
||||
PublishToRekor = false
|
||||
});
|
||||
var witnessPublisher = new ReachabilityWitnessPublisher(
|
||||
witnessOptions,
|
||||
cryptoHash,
|
||||
NullLogger<ReachabilityWitnessPublisher>.Instance);
|
||||
|
||||
var writer = new AttestingRichGraphWriter(
|
||||
graphWriter,
|
||||
witnessPublisher,
|
||||
witnessOptions,
|
||||
NullLogger<AttestingRichGraphWriter>.Instance);
|
||||
|
||||
var graph = CreateTestGraph();
|
||||
|
||||
// Act - write twice with same input
|
||||
var result1 = await writer.WriteWithAttestationAsync(
|
||||
graph,
|
||||
_tempDir.FullName,
|
||||
"analysis-1",
|
||||
"sha256:abc123");
|
||||
|
||||
var result2 = await writer.WriteWithAttestationAsync(
|
||||
graph,
|
||||
_tempDir.FullName,
|
||||
"analysis-2",
|
||||
"sha256:abc123");
|
||||
|
||||
// Assert - same graph should produce same hash
|
||||
Assert.Equal(result1.GraphHash, result2.GraphHash);
|
||||
}
|
||||
|
||||
private static RichGraph CreateTestGraph()
|
||||
{
|
||||
return new RichGraph(
|
||||
Nodes: new[]
|
||||
{
|
||||
new RichGraphNode(
|
||||
Id: "entry-1",
|
||||
SymbolId: "Handler.handle",
|
||||
CodeId: null,
|
||||
Purl: "pkg:maven/com.example/handler@1.0.0",
|
||||
Lang: "java",
|
||||
Kind: "http_handler",
|
||||
Display: "GET /api/users",
|
||||
BuildId: null,
|
||||
Evidence: null,
|
||||
Attributes: null,
|
||||
SymbolDigest: "sha256:entry1digest"),
|
||||
new RichGraphNode(
|
||||
Id: "sink-1",
|
||||
SymbolId: "DB.executeQuery",
|
||||
CodeId: null,
|
||||
Purl: "pkg:maven/org.database/driver@2.0.0",
|
||||
Lang: "java",
|
||||
Kind: "sql_sink",
|
||||
Display: "executeQuery(String)",
|
||||
BuildId: null,
|
||||
Evidence: null,
|
||||
Attributes: new Dictionary<string, string> { ["is_sink"] = "true" },
|
||||
SymbolDigest: "sha256:sink1digest")
|
||||
},
|
||||
Edges: new[]
|
||||
{
|
||||
new RichGraphEdge(
|
||||
From: "entry-1",
|
||||
To: "sink-1",
|
||||
Kind: "call",
|
||||
Purl: null,
|
||||
SymbolDigest: null,
|
||||
Evidence: null,
|
||||
Confidence: 1.0,
|
||||
Candidates: null)
|
||||
},
|
||||
Roots: new[]
|
||||
{
|
||||
new RichGraphRoot("entry-1", "runtime", null)
|
||||
},
|
||||
Analyzer: new RichGraphAnalyzer("stellaops.scanner.reachability", "1.0.0", null),
|
||||
Schema: "richgraph-v1"
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Test crypto hash implementation.
|
||||
/// </summary>
|
||||
private sealed class TestCryptoHash : ICryptoHash
|
||||
{
|
||||
public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
|
||||
=> System.Security.Cryptography.SHA256.HashData(data);
|
||||
|
||||
public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
|
||||
=> Convert.ToHexString(ComputeHash(data, algorithmId)).ToLowerInvariant();
|
||||
|
||||
public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
|
||||
=> Convert.ToBase64String(ComputeHash(data, algorithmId));
|
||||
|
||||
public async ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
using var buffer = new MemoryStream();
|
||||
await stream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
|
||||
return System.Security.Cryptography.SHA256.HashData(buffer.ToArray());
|
||||
}
|
||||
|
||||
public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var hash = await ComputeHashAsync(stream, algorithmId, cancellationToken).ConfigureAwait(false);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
|
||||
=> ComputeHash(data);
|
||||
|
||||
public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
|
||||
=> ComputeHashHex(data);
|
||||
|
||||
public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
|
||||
=> ComputeHashBase64(data);
|
||||
|
||||
public async ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
|
||||
=> await ComputeHashAsync(stream, null, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
public async ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
|
||||
=> await ComputeHashHexAsync(stream, null, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
public string GetAlgorithmForPurpose(string purpose) => "blake3";
|
||||
|
||||
public string GetHashPrefix(string purpose) => "blake3:";
|
||||
|
||||
public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
|
||||
=> $"blake3:{ComputeHashHex(data)}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"_type": "https://in-toto.io/Statement/v1",
|
||||
"subject": [
|
||||
{
|
||||
"name": "pkg:oci/test-image@sha256:abc123",
|
||||
"digest": {
|
||||
"sha256": "abc123def456789012345678901234567890123456789012345678901234"
|
||||
}
|
||||
}
|
||||
],
|
||||
"predicateType": "https://stellaops.io/attestation/reachabilityWitness/v1",
|
||||
"predicate": {
|
||||
"version": "1.0.0",
|
||||
"analysisTimestamp": "2025-01-01T00:00:00.0000000Z",
|
||||
"analyzer": {
|
||||
"name": "stellaops.scanner.reachability",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"graph": {
|
||||
"schema": "richgraph-v1",
|
||||
"hash": "blake3:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
|
||||
"nodeCount": 3,
|
||||
"edgeCount": 2
|
||||
},
|
||||
"summary": {
|
||||
"sinkCount": 1,
|
||||
"entrypointCount": 1,
|
||||
"pathCount": 1,
|
||||
"gateCoverage": 0.0
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
{
|
||||
"_type": "https://in-toto.io/Statement/v1",
|
||||
"subject": [
|
||||
{
|
||||
"name": "pkg:oci/production-app@sha256:xyz789",
|
||||
"digest": {
|
||||
"sha256": "xyz789abc123def456789012345678901234567890123456789012345678"
|
||||
}
|
||||
}
|
||||
],
|
||||
"predicateType": "https://stellaops.io/attestation/reachabilityWitness/v1",
|
||||
"predicate": {
|
||||
"version": "1.0.0",
|
||||
"analysisTimestamp": "2025-01-15T12:30:00.0000000Z",
|
||||
"analyzer": {
|
||||
"name": "stellaops.scanner.reachability",
|
||||
"version": "1.0.0"
|
||||
},
|
||||
"graph": {
|
||||
"schema": "richgraph-v1",
|
||||
"hash": "blake3:fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210",
|
||||
"nodeCount": 150,
|
||||
"edgeCount": 340,
|
||||
"casUri": "cas://reachability/graphs/fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210"
|
||||
},
|
||||
"summary": {
|
||||
"sinkCount": 12,
|
||||
"entrypointCount": 8,
|
||||
"pathCount": 45,
|
||||
"gateCoverage": 0.67
|
||||
},
|
||||
"policy": {
|
||||
"hash": "sha256:policy123456789012345678901234567890123456789012345678901234"
|
||||
},
|
||||
"source": {
|
||||
"commit": "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
|
||||
},
|
||||
"runtime": {
|
||||
"observedAt": "2025-01-15T12:25:00.0000000Z",
|
||||
"traceCount": 1250,
|
||||
"coveredPaths": 38,
|
||||
"runtimeConfidence": 0.84
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -206,15 +206,8 @@ public class PathExplanationServiceTests
|
||||
|
||||
private static RichGraph CreateSimpleGraph()
|
||||
{
|
||||
return new RichGraph
|
||||
{
|
||||
Schema = "stellaops.richgraph.v1",
|
||||
Meta = new RichGraphMeta { Hash = "test-hash" },
|
||||
Roots = new[]
|
||||
{
|
||||
new RichGraphRoot("entry-1", "runtime", null)
|
||||
},
|
||||
Nodes = new[]
|
||||
return new RichGraph(
|
||||
Nodes: new[]
|
||||
{
|
||||
new RichGraphNode(
|
||||
Id: "entry-1",
|
||||
@@ -241,21 +234,23 @@ public class PathExplanationServiceTests
|
||||
Attributes: new Dictionary<string, string> { ["is_sink"] = "true" },
|
||||
SymbolDigest: null)
|
||||
},
|
||||
Edges = new[]
|
||||
Edges: new[]
|
||||
{
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", null)
|
||||
}
|
||||
};
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", null, null, null, 1.0, null)
|
||||
},
|
||||
Roots: new[]
|
||||
{
|
||||
new RichGraphRoot("entry-1", "runtime", null)
|
||||
},
|
||||
Analyzer: new RichGraphAnalyzer("test", "1.0", null),
|
||||
Schema: "stellaops.richgraph.v1"
|
||||
);
|
||||
}
|
||||
|
||||
private static RichGraph CreateGraphWithMultipleSinks()
|
||||
{
|
||||
return new RichGraph
|
||||
{
|
||||
Schema = "stellaops.richgraph.v1",
|
||||
Meta = new RichGraphMeta { Hash = "test-hash" },
|
||||
Roots = new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Nodes = new[]
|
||||
return new RichGraph(
|
||||
Nodes: new[]
|
||||
{
|
||||
new RichGraphNode("entry-1", "Handler", null, null, "java", "handler", null, null, null, null, null),
|
||||
new RichGraphNode("sink-1", "Sink1", null, null, "java", "sink", null, null, null,
|
||||
@@ -263,12 +258,15 @@ public class PathExplanationServiceTests
|
||||
new RichGraphNode("sink-2", "Sink2", null, null, "java", "sink", null, null, null,
|
||||
new Dictionary<string, string> { ["is_sink"] = "true" }, null)
|
||||
},
|
||||
Edges = new[]
|
||||
Edges: new[]
|
||||
{
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", null),
|
||||
new RichGraphEdge("entry-1", "sink-2", "call", null)
|
||||
}
|
||||
};
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", null, null, null, 1.0, null),
|
||||
new RichGraphEdge("entry-1", "sink-2", "call", null, null, null, 1.0, null)
|
||||
},
|
||||
Roots: new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Analyzer: new RichGraphAnalyzer("test", "1.0", null),
|
||||
Schema: "stellaops.richgraph.v1"
|
||||
);
|
||||
}
|
||||
|
||||
private static RichGraph CreateGraphWithGates()
|
||||
@@ -285,22 +283,21 @@ public class PathExplanationServiceTests
|
||||
}
|
||||
};
|
||||
|
||||
return new RichGraph
|
||||
{
|
||||
Schema = "stellaops.richgraph.v1",
|
||||
Meta = new RichGraphMeta { Hash = "test-hash" },
|
||||
Roots = new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Nodes = new[]
|
||||
return new RichGraph(
|
||||
Nodes: new[]
|
||||
{
|
||||
new RichGraphNode("entry-1", "Handler", null, null, "java", "handler", null, null, null, null, null),
|
||||
new RichGraphNode("sink-1", "Sink", null, null, "java", "sink", null, null, null,
|
||||
new Dictionary<string, string> { ["is_sink"] = "true" }, null)
|
||||
},
|
||||
Edges = new[]
|
||||
Edges: new[]
|
||||
{
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", gates)
|
||||
}
|
||||
};
|
||||
new RichGraphEdge("entry-1", "sink-1", "call", null, null, null, 1.0, null, gates)
|
||||
},
|
||||
Roots: new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Analyzer: new RichGraphAnalyzer("test", "1.0", null),
|
||||
Schema: "stellaops.richgraph.v1"
|
||||
);
|
||||
}
|
||||
|
||||
private static RichGraph CreateDeepGraph(int depth)
|
||||
@@ -317,18 +314,17 @@ public class PathExplanationServiceTests
|
||||
|
||||
if (i > 0)
|
||||
{
|
||||
edges.Add(new RichGraphEdge($"node-{i - 1}", $"node-{i}", "call", null));
|
||||
edges.Add(new RichGraphEdge($"node-{i - 1}", $"node-{i}", "call", null, null, null, 1.0, null));
|
||||
}
|
||||
}
|
||||
|
||||
return new RichGraph
|
||||
{
|
||||
Schema = "stellaops.richgraph.v1",
|
||||
Meta = new RichGraphMeta { Hash = "test-hash" },
|
||||
Roots = new[] { new RichGraphRoot("node-0", "runtime", null) },
|
||||
Nodes = nodes,
|
||||
Edges = edges
|
||||
};
|
||||
return new RichGraph(
|
||||
Nodes: nodes,
|
||||
Edges: edges,
|
||||
Roots: new[] { new RichGraphRoot("node-0", "runtime", null) },
|
||||
Analyzer: new RichGraphAnalyzer("test", "1.0", null),
|
||||
Schema: "stellaops.richgraph.v1"
|
||||
);
|
||||
}
|
||||
|
||||
private static RichGraph CreateGraphWithMultiplePaths(int pathCount)
|
||||
@@ -344,17 +340,16 @@ public class PathExplanationServiceTests
|
||||
{
|
||||
nodes.Add(new RichGraphNode($"sink-{i}", $"Sink{i}", null, null, "java", "sink", null, null, null,
|
||||
new Dictionary<string, string> { ["is_sink"] = "true" }, null));
|
||||
edges.Add(new RichGraphEdge("entry-1", $"sink-{i}", "call", null));
|
||||
edges.Add(new RichGraphEdge("entry-1", $"sink-{i}", "call", null, null, null, 1.0, null));
|
||||
}
|
||||
|
||||
return new RichGraph
|
||||
{
|
||||
Schema = "stellaops.richgraph.v1",
|
||||
Meta = new RichGraphMeta { Hash = "test-hash" },
|
||||
Roots = new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Nodes = nodes,
|
||||
Edges = edges
|
||||
};
|
||||
return new RichGraph(
|
||||
Nodes: nodes,
|
||||
Edges: edges,
|
||||
Roots: new[] { new RichGraphRoot("entry-1", "runtime", null) },
|
||||
Analyzer: new RichGraphAnalyzer("test", "1.0", null),
|
||||
Schema: "stellaops.richgraph.v1"
|
||||
);
|
||||
}
|
||||
|
||||
private static ExplainedPath CreateTestPath()
|
||||
@@ -364,7 +359,7 @@ public class PathExplanationServiceTests
|
||||
PathId = "entry:sink:0",
|
||||
SinkId = "sink-1",
|
||||
SinkSymbol = "DB.query",
|
||||
SinkCategory = SinkCategory.SqlRaw,
|
||||
SinkCategory = Explanation.SinkCategory.SqlRaw,
|
||||
EntrypointId = "entry-1",
|
||||
EntrypointSymbol = "Handler.handle",
|
||||
EntrypointType = EntrypointType.HttpEndpoint,
|
||||
@@ -402,7 +397,7 @@ public class PathExplanationServiceTests
|
||||
PathId = "entry:sink:0",
|
||||
SinkId = "sink-1",
|
||||
SinkSymbol = "DB.query",
|
||||
SinkCategory = SinkCategory.SqlRaw,
|
||||
SinkCategory = Explanation.SinkCategory.SqlRaw,
|
||||
EntrypointId = "entry-1",
|
||||
EntrypointSymbol = "Handler.handle",
|
||||
EntrypointType = EntrypointType.HttpEndpoint,
|
||||
|
||||
@@ -132,6 +132,6 @@ public class RichGraphWriterTests
|
||||
|
||||
// Verify meta.json also contains the blake3-prefixed hash
|
||||
var metaJson = await File.ReadAllTextAsync(result.MetaPath);
|
||||
Assert.Contains("\"graph_hash\":\"blake3:", metaJson);
|
||||
Assert.Contains("\"graph_hash\": \"blake3:", metaJson);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,14 +10,30 @@ import {
|
||||
EvidencePanelMetricsService,
|
||||
EvidencePanelAction,
|
||||
} from './evidence-panel-metrics.service';
|
||||
import { APP_CONFIG } from '../config/app.config';
|
||||
import { APP_CONFIG, AppConfig } from '../config/app-config.model';
|
||||
|
||||
describe('EvidencePanelMetricsService', () => {
|
||||
let service: EvidencePanelMetricsService;
|
||||
let httpMock: HttpTestingController;
|
||||
|
||||
const mockConfig = {
|
||||
apiBaseUrl: 'http://localhost:5000/api',
|
||||
const mockConfig: AppConfig = {
|
||||
authority: {
|
||||
issuer: 'https://auth.stellaops.test/',
|
||||
clientId: 'ui-client',
|
||||
authorizeEndpoint: 'https://auth.stellaops.test/connect/authorize',
|
||||
tokenEndpoint: 'https://auth.stellaops.test/connect/token',
|
||||
redirectUri: 'https://ui.stellaops.test/auth/callback',
|
||||
scope: 'openid profile email ui.read',
|
||||
audience: 'https://scanner.stellaops.test',
|
||||
},
|
||||
apiBaseUrls: {
|
||||
gateway: 'http://localhost:5000/api',
|
||||
authority: 'https://auth.stellaops.test',
|
||||
scanner: 'https://scanner.stellaops.test',
|
||||
policy: 'https://policy.stellaops.test',
|
||||
concelier: 'https://concelier.stellaops.test',
|
||||
attestor: 'https://attestor.stellaops.test',
|
||||
},
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -197,7 +213,7 @@ describe('EvidencePanelMetricsService', () => {
|
||||
}
|
||||
|
||||
// Expect POST to metrics endpoint
|
||||
const req = httpMock.expectOne(`${mockConfig.apiBaseUrl}/metrics/evidence-panel`);
|
||||
const req = httpMock.expectOne(`${mockConfig.apiBaseUrls.gateway}/metrics/evidence-panel`);
|
||||
expect(req.request.method).toBe('POST');
|
||||
expect(req.request.body.sessions.length).toBe(10);
|
||||
|
||||
@@ -213,7 +229,7 @@ describe('EvidencePanelMetricsService', () => {
|
||||
service.endSession();
|
||||
}
|
||||
|
||||
const req = httpMock.expectOne(`${mockConfig.apiBaseUrl}/metrics/evidence-panel`);
|
||||
const req = httpMock.expectOne(`${mockConfig.apiBaseUrls.gateway}/metrics/evidence-panel`);
|
||||
const sessions = req.request.body.sessions;
|
||||
|
||||
expect(sessions[0]).toEqual(jasmine.objectContaining({
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
|
||||
import { Injectable, signal, computed, inject } from '@angular/core';
|
||||
import { HttpClient } from '@angular/common/http';
|
||||
import { APP_CONFIG, AppConfig } from '../config/app.config';
|
||||
import { APP_CONFIG, AppConfig } from '../config/app-config.model';
|
||||
|
||||
/**
|
||||
* Types of actions tracked in the Evidence Panel
|
||||
@@ -243,7 +243,7 @@ export class EvidencePanelMetricsService {
|
||||
|
||||
// Fire-and-forget POST to metrics endpoint
|
||||
this.http.post(
|
||||
`${this.config.apiBaseUrl}/metrics/evidence-panel`,
|
||||
`${this.resolveMetricsBaseUrl()}/metrics/evidence-panel`,
|
||||
{
|
||||
sessions: sessions.map(s => ({
|
||||
sessionId: s.sessionId,
|
||||
@@ -264,6 +264,10 @@ export class EvidencePanelMetricsService {
|
||||
});
|
||||
}
|
||||
|
||||
private resolveMetricsBaseUrl(): string {
|
||||
return this.config.apiBaseUrls.gateway ?? this.config.apiBaseUrls.scanner;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current metrics summary for debugging/display
|
||||
*/
|
||||
|
||||
@@ -194,11 +194,11 @@ export class TriageEvidenceHttpClient implements TriageEvidenceApi {
|
||||
}
|
||||
}
|
||||
|
||||
private buildParams(options?: Record<string, unknown>): HttpParams {
|
||||
private buildParams(options?: object): HttpParams {
|
||||
let params = new HttpParams();
|
||||
|
||||
if (options) {
|
||||
for (const [key, value] of Object.entries(options)) {
|
||||
for (const [key, value] of Object.entries(options as Record<string, unknown>)) {
|
||||
if (value !== undefined && value !== null && key !== 'tenantId' && key !== 'traceId') {
|
||||
params = params.set(key, String(value));
|
||||
}
|
||||
|
||||
@@ -83,13 +83,15 @@ export class TelemetrySamplerService {
|
||||
}
|
||||
|
||||
private createSessionId(): string {
|
||||
if (typeof crypto !== 'undefined' && 'randomUUID' in crypto) {
|
||||
return crypto.randomUUID();
|
||||
const cryptoApi = this.getCryptoApi();
|
||||
|
||||
if (cryptoApi?.randomUUID) {
|
||||
return cryptoApi.randomUUID();
|
||||
}
|
||||
|
||||
if (typeof crypto !== 'undefined' && 'getRandomValues' in crypto) {
|
||||
if (cryptoApi?.getRandomValues) {
|
||||
const bytes = new Uint8Array(16);
|
||||
crypto.getRandomValues(bytes);
|
||||
cryptoApi.getRandomValues(bytes);
|
||||
return Array.from(bytes, (b) => b.toString(16).padStart(2, '0')).join('');
|
||||
}
|
||||
|
||||
@@ -97,13 +99,21 @@ export class TelemetrySamplerService {
|
||||
}
|
||||
|
||||
private createSampleValue(): number {
|
||||
if (typeof crypto !== 'undefined' && 'getRandomValues' in crypto) {
|
||||
const cryptoApi = this.getCryptoApi();
|
||||
|
||||
if (cryptoApi?.getRandomValues) {
|
||||
const bytes = new Uint32Array(1);
|
||||
crypto.getRandomValues(bytes);
|
||||
cryptoApi.getRandomValues(bytes);
|
||||
return bytes[0] / 0x1_0000_0000;
|
||||
}
|
||||
|
||||
return Math.random();
|
||||
}
|
||||
}
|
||||
|
||||
private getCryptoApi(): Crypto | null {
|
||||
if (typeof globalThis === 'undefined') return null;
|
||||
|
||||
const value = (globalThis as unknown as { crypto?: Crypto }).crypto;
|
||||
return value ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
import { HttpClientTestingModule } from '@angular/common/http/testing';
|
||||
|
||||
import type { EvidenceApi } from '../../core/api/evidence.client';
|
||||
import { EVIDENCE_API } from '../../core/api/evidence.client';
|
||||
import type { EvidenceData, VexDecision, VexStatus } from '../../core/api/evidence.models';
|
||||
import { APP_CONFIG, type AppConfig } from '../../core/config/app-config.model';
|
||||
import { EvidencePanelComponent } from './evidence-panel.component';
|
||||
|
||||
function createVexDecision(status: VexStatus, id: string): VexDecision {
|
||||
@@ -32,8 +34,31 @@ describe('EvidencePanelComponent', () => {
|
||||
]);
|
||||
|
||||
await TestBed.configureTestingModule({
|
||||
imports: [EvidencePanelComponent],
|
||||
providers: [{ provide: EVIDENCE_API, useValue: api }],
|
||||
imports: [HttpClientTestingModule, EvidencePanelComponent],
|
||||
providers: [
|
||||
{ provide: EVIDENCE_API, useValue: api },
|
||||
{
|
||||
provide: APP_CONFIG,
|
||||
useValue: {
|
||||
authority: {
|
||||
issuer: 'https://auth.stellaops.test/',
|
||||
clientId: 'ui-client',
|
||||
authorizeEndpoint: 'https://auth.stellaops.test/connect/authorize',
|
||||
tokenEndpoint: 'https://auth.stellaops.test/connect/token',
|
||||
redirectUri: 'https://ui.stellaops.test/auth/callback',
|
||||
scope: 'openid profile email ui.read',
|
||||
audience: 'https://scanner.stellaops.test',
|
||||
},
|
||||
apiBaseUrls: {
|
||||
authority: 'https://auth.stellaops.test',
|
||||
scanner: 'https://scanner.stellaops.test',
|
||||
policy: 'https://policy.stellaops.test',
|
||||
concelier: 'https://concelier.stellaops.test',
|
||||
attestor: 'https://attestor.stellaops.test',
|
||||
},
|
||||
} satisfies AppConfig,
|
||||
},
|
||||
],
|
||||
}).compileComponents();
|
||||
|
||||
fixture = TestBed.createComponent(EvidencePanelComponent);
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
/**
|
||||
* PathViewerComponent barrel export
|
||||
*/
|
||||
export * from './path-viewer.component';
|
||||
@@ -0,0 +1,110 @@
|
||||
<!--
|
||||
PathViewerComponent Template
|
||||
Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
Task: UI-003
|
||||
-->
|
||||
<div class="path-viewer" [class.path-viewer--collapsed]="collapsed()">
|
||||
<!-- Header -->
|
||||
<div class="path-viewer__header">
|
||||
<span class="path-viewer__title">{{ title() }}</span>
|
||||
<div class="path-viewer__actions">
|
||||
@if (hiddenNodeCount() > 0) {
|
||||
<button
|
||||
type="button"
|
||||
class="path-viewer__btn path-viewer__btn--expand"
|
||||
(click)="toggleExpand()"
|
||||
[attr.aria-expanded]="isExpanded()">
|
||||
@if (isExpanded()) {
|
||||
Collapse ({{ hiddenNodeCount() }} hidden)
|
||||
} @else {
|
||||
Expand (+{{ hiddenNodeCount() }} nodes)
|
||||
}
|
||||
</button>
|
||||
}
|
||||
@if (collapsible()) {
|
||||
<button
|
||||
type="button"
|
||||
class="path-viewer__btn path-viewer__btn--collapse"
|
||||
(click)="toggleCollapse()"
|
||||
[attr.aria-expanded]="!collapsed()">
|
||||
{{ collapsed() ? 'Show' : 'Hide' }}
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Content -->
|
||||
@if (!collapsed()) {
|
||||
<div class="path-viewer__content">
|
||||
<!-- Path visualization -->
|
||||
<ol class="path-viewer__nodes" role="list">
|
||||
@for (node of displayNodes(); track node.nodeId; let i = $index; let last = $last) {
|
||||
<!-- Node -->
|
||||
<li
|
||||
[class]="getNodeClass(node)"
|
||||
(click)="onNodeClick(node)"
|
||||
(keydown.enter)="onNodeClick(node)"
|
||||
tabindex="0"
|
||||
role="listitem"
|
||||
[attr.aria-label]="node.symbol">
|
||||
<span class="path-node__icon" [attr.aria-hidden]="true">
|
||||
{{ getNodeIcon(node) }}
|
||||
</span>
|
||||
<div class="path-node__details">
|
||||
<span class="path-node__symbol">{{ node.symbol }}</span>
|
||||
@if (node.file) {
|
||||
<span class="path-node__location">
|
||||
{{ node.file }}@if (node.line) {:{{ node.line }}}
|
||||
</span>
|
||||
}
|
||||
@if (node.package) {
|
||||
<span class="path-node__package">{{ node.package }}</span>
|
||||
}
|
||||
@if (showConfidence() && node.confidence !== undefined) {
|
||||
<span class="path-node__confidence">
|
||||
{{ formatConfidence(node.confidence) }}
|
||||
</span>
|
||||
}
|
||||
@if (highlightChanges() && node.isChanged && node.changeKind) {
|
||||
<span class="path-node__change-badge" [class]="'path-node__change-badge--' + node.changeKind">
|
||||
{{ formatChangeKind(node.changeKind) }}
|
||||
</span>
|
||||
}
|
||||
@if (node.nodeType === 'entrypoint') {
|
||||
<span class="path-node__type-badge path-node__type-badge--entrypoint">
|
||||
ENTRYPOINT
|
||||
</span>
|
||||
}
|
||||
@if (node.nodeType === 'sink') {
|
||||
<span class="path-node__type-badge path-node__type-badge--sink">
|
||||
SINK
|
||||
</span>
|
||||
}
|
||||
@if (node.nodeType === 'gate') {
|
||||
<span class="path-node__type-badge path-node__type-badge--gate">
|
||||
GATE
|
||||
</span>
|
||||
}
|
||||
</div>
|
||||
</li>
|
||||
|
||||
<!-- Connector -->
|
||||
@if (!last) {
|
||||
<li class="path-viewer__connector" role="presentation" aria-hidden="true">
|
||||
<span class="path-viewer__connector-line"></span>
|
||||
</li>
|
||||
}
|
||||
}
|
||||
</ol>
|
||||
|
||||
<!-- Hidden nodes indicator -->
|
||||
@if (hiddenNodeCount() > 0 && !isExpanded()) {
|
||||
<div class="path-viewer__hidden-indicator">
|
||||
<span class="path-viewer__hidden-text">
|
||||
… {{ hiddenNodeCount() }} intermediate node(s) hidden …
|
||||
</span>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
@@ -0,0 +1,296 @@
|
||||
/**
|
||||
* PathViewerComponent Styles
|
||||
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
* Task: UI-004
|
||||
*/
|
||||
|
||||
// Variables
|
||||
$color-entrypoint: #10b981; // Green
|
||||
$color-sink: #ef4444; // Red
|
||||
$color-gate: #f59e0b; // Amber
|
||||
$color-changed: #8b5cf6; // Purple
|
||||
$color-added: #22c55e;
|
||||
$color-removed: #ef4444;
|
||||
$color-modified: #f59e0b;
|
||||
$color-border: #e5e7eb;
|
||||
$color-bg: #ffffff;
|
||||
$color-bg-hover: #f9fafb;
|
||||
$color-text: #111827;
|
||||
$color-text-muted: #6b7280;
|
||||
|
||||
.path-viewer {
|
||||
font-family: var(--font-family-sans, system-ui, sans-serif);
|
||||
background: $color-bg;
|
||||
border: 1px solid $color-border;
|
||||
border-radius: 8px;
|
||||
overflow: hidden;
|
||||
|
||||
&--collapsed {
|
||||
.path-viewer__content {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
&__header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
padding: 12px 16px;
|
||||
border-bottom: 1px solid $color-border;
|
||||
background: #f9fafb;
|
||||
}
|
||||
|
||||
&__title {
|
||||
font-weight: 600;
|
||||
font-size: 14px;
|
||||
color: $color-text;
|
||||
}
|
||||
|
||||
&__actions {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&__btn {
|
||||
padding: 4px 12px;
|
||||
font-size: 12px;
|
||||
font-weight: 500;
|
||||
border: 1px solid $color-border;
|
||||
border-radius: 4px;
|
||||
background: $color-bg;
|
||||
color: $color-text-muted;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s ease;
|
||||
|
||||
&:hover {
|
||||
background: $color-bg-hover;
|
||||
color: $color-text;
|
||||
}
|
||||
|
||||
&:focus-visible {
|
||||
outline: 2px solid #3b82f6;
|
||||
outline-offset: 2px;
|
||||
}
|
||||
|
||||
&--expand {
|
||||
color: #3b82f6;
|
||||
border-color: #3b82f6;
|
||||
|
||||
&:hover {
|
||||
background: #eff6ff;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
&__content {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
&__nodes {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
&__connector {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
padding: 4px 0;
|
||||
|
||||
&-line {
|
||||
width: 2px;
|
||||
height: 16px;
|
||||
background: $color-border;
|
||||
}
|
||||
}
|
||||
|
||||
&__hidden-indicator {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
padding: 8px 0;
|
||||
}
|
||||
|
||||
&__hidden-text {
|
||||
font-size: 12px;
|
||||
font-style: italic;
|
||||
color: $color-text-muted;
|
||||
}
|
||||
}
|
||||
|
||||
.path-node {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
gap: 12px;
|
||||
padding: 12px;
|
||||
border: 1px solid $color-border;
|
||||
border-radius: 6px;
|
||||
background: $color-bg;
|
||||
cursor: pointer;
|
||||
transition: all 0.15s ease;
|
||||
|
||||
&:hover {
|
||||
background: $color-bg-hover;
|
||||
border-color: #d1d5db;
|
||||
}
|
||||
|
||||
&:focus-visible {
|
||||
outline: 2px solid #3b82f6;
|
||||
outline-offset: 2px;
|
||||
}
|
||||
|
||||
&__icon {
|
||||
flex-shrink: 0;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
font-size: 14px;
|
||||
border-radius: 50%;
|
||||
background: #f3f4f6;
|
||||
color: $color-text-muted;
|
||||
}
|
||||
|
||||
&__details {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 4px;
|
||||
}
|
||||
|
||||
&__symbol {
|
||||
font-weight: 500;
|
||||
font-size: 14px;
|
||||
font-family: var(--font-family-mono, 'SF Mono', Consolas, monospace);
|
||||
color: $color-text;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
&__location {
|
||||
font-size: 12px;
|
||||
color: $color-text-muted;
|
||||
font-family: var(--font-family-mono, 'SF Mono', Consolas, monospace);
|
||||
}
|
||||
|
||||
&__package {
|
||||
font-size: 11px;
|
||||
color: $color-text-muted;
|
||||
background: #f3f4f6;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
width: fit-content;
|
||||
}
|
||||
|
||||
&__confidence {
|
||||
font-size: 11px;
|
||||
color: $color-text-muted;
|
||||
background: #e0e7ff;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
width: fit-content;
|
||||
}
|
||||
|
||||
&__change-badge {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
width: fit-content;
|
||||
|
||||
&--added {
|
||||
background: #dcfce7;
|
||||
color: #166534;
|
||||
}
|
||||
|
||||
&--removed {
|
||||
background: #fee2e2;
|
||||
color: #991b1b;
|
||||
}
|
||||
|
||||
&--modified {
|
||||
background: #fef3c7;
|
||||
color: #92400e;
|
||||
}
|
||||
}
|
||||
|
||||
&__type-badge {
|
||||
font-size: 10px;
|
||||
font-weight: 600;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.05em;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
width: fit-content;
|
||||
|
||||
&--entrypoint {
|
||||
background: #d1fae5;
|
||||
color: #065f46;
|
||||
}
|
||||
|
||||
&--sink {
|
||||
background: #fee2e2;
|
||||
color: #991b1b;
|
||||
}
|
||||
|
||||
&--gate {
|
||||
background: #fef3c7;
|
||||
color: #92400e;
|
||||
}
|
||||
}
|
||||
|
||||
// Node type variants
|
||||
&--entrypoint {
|
||||
border-color: $color-entrypoint;
|
||||
|
||||
.path-node__icon {
|
||||
background: #d1fae5;
|
||||
color: $color-entrypoint;
|
||||
}
|
||||
}
|
||||
|
||||
&--sink {
|
||||
border-color: $color-sink;
|
||||
|
||||
.path-node__icon {
|
||||
background: #fee2e2;
|
||||
color: $color-sink;
|
||||
}
|
||||
}
|
||||
|
||||
&--gate {
|
||||
border-color: $color-gate;
|
||||
|
||||
.path-node__icon {
|
||||
background: #fef3c7;
|
||||
color: $color-gate;
|
||||
}
|
||||
}
|
||||
|
||||
// Changed state
|
||||
&--changed {
|
||||
border-color: $color-changed;
|
||||
background: #faf5ff;
|
||||
|
||||
.path-node__icon {
|
||||
background: #ede9fe;
|
||||
color: $color-changed;
|
||||
}
|
||||
}
|
||||
|
||||
&--added {
|
||||
border-color: $color-added;
|
||||
background: #f0fdf4;
|
||||
}
|
||||
|
||||
&--removed {
|
||||
border-color: $color-removed;
|
||||
background: #fef2f2;
|
||||
}
|
||||
|
||||
&--modified {
|
||||
border-color: $color-modified;
|
||||
background: #fffbeb;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
/**
|
||||
* PathViewerComponent - Call Path Visualization
|
||||
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
* Task: UI-003
|
||||
*/
|
||||
|
||||
import { Component, input, output, computed, signal } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { CompressedPath, PathNode, ExpandedPath, PathEdge } from '../../models/path-viewer.models';
|
||||
|
||||
/**
|
||||
* Visualizes reachability call paths from entrypoint to sink.
|
||||
* Supports both compressed and expanded views.
|
||||
*
|
||||
* @example
|
||||
* ```html
|
||||
* <app-path-viewer
|
||||
* [path]="compressedPath"
|
||||
* [collapsible]="true"
|
||||
* [showConfidence]="true"
|
||||
* (nodeClick)="onNodeClick($event)"
|
||||
* (expandRequest)="onExpandPath($event)">
|
||||
* </app-path-viewer>
|
||||
* ```
|
||||
*/
|
||||
@Component({
|
||||
selector: 'app-path-viewer',
|
||||
standalone: true,
|
||||
imports: [CommonModule],
|
||||
templateUrl: './path-viewer.component.html',
|
||||
styleUrl: './path-viewer.component.scss'
|
||||
})
|
||||
export class PathViewerComponent {
|
||||
/** The compressed path to display */
|
||||
path = input.required<CompressedPath>();
|
||||
|
||||
/** Optional title for the path viewer */
|
||||
title = input<string>('Reachability Path');
|
||||
|
||||
/** Whether the viewer can be collapsed */
|
||||
collapsible = input<boolean>(true);
|
||||
|
||||
/** Whether to show confidence scores */
|
||||
showConfidence = input<boolean>(false);
|
||||
|
||||
/** Whether to highlight changed nodes */
|
||||
highlightChanges = input<boolean>(true);
|
||||
|
||||
/** Maximum depth to show before collapsing */
|
||||
maxVisibleDepth = input<number>(5);
|
||||
|
||||
/** Emits when a node is clicked */
|
||||
nodeClick = output<PathNode>();
|
||||
|
||||
/** Emits when path expansion is requested */
|
||||
expandRequest = output<string>();
|
||||
|
||||
/** Internal collapsed state */
|
||||
collapsed = signal<boolean>(false);
|
||||
|
||||
/** Whether the full path is expanded */
|
||||
isExpanded = signal<boolean>(false);
|
||||
|
||||
/** Computed: effective nodes to display */
|
||||
displayNodes = computed(() => {
|
||||
const p = this.path();
|
||||
if (this.isExpanded()) {
|
||||
return this.buildFullNodeList(p);
|
||||
}
|
||||
return [p.entrypoint, ...p.keyNodes, p.sink];
|
||||
});
|
||||
|
||||
/** Computed: count of hidden nodes */
|
||||
hiddenNodeCount = computed(() => {
|
||||
const p = this.path();
|
||||
if (this.isExpanded()) {
|
||||
return 0;
|
||||
}
|
||||
return Math.max(0, p.intermediateCount - p.keyNodes.length);
|
||||
});
|
||||
|
||||
/** Toggle collapsed state */
|
||||
toggleCollapse(): void {
|
||||
this.collapsed.update(v => !v);
|
||||
}
|
||||
|
||||
/** Toggle expanded state */
|
||||
toggleExpand(): void {
|
||||
const p = this.path();
|
||||
if (!this.isExpanded() && p.fullPath && p.fullPath.length > 0) {
|
||||
this.expandRequest.emit(p.fullPath[0]);
|
||||
}
|
||||
this.isExpanded.update(v => !v);
|
||||
}
|
||||
|
||||
/** Handle node click */
|
||||
onNodeClick(node: PathNode): void {
|
||||
this.nodeClick.emit(node);
|
||||
}
|
||||
|
||||
/** Get CSS class for node type */
|
||||
getNodeClass(node: PathNode): string {
|
||||
const classes: string[] = ['path-node'];
|
||||
|
||||
if (node.nodeType) {
|
||||
classes.push(`path-node--${node.nodeType}`);
|
||||
}
|
||||
|
||||
if (this.highlightChanges() && node.isChanged) {
|
||||
classes.push('path-node--changed');
|
||||
if (node.changeKind) {
|
||||
classes.push(`path-node--${node.changeKind}`);
|
||||
}
|
||||
}
|
||||
|
||||
return classes.join(' ');
|
||||
}
|
||||
|
||||
/** Get icon for node type */
|
||||
getNodeIcon(node: PathNode): string {
|
||||
if (node.isChanged) {
|
||||
return '●';
|
||||
}
|
||||
|
||||
switch (node.nodeType) {
|
||||
case 'entrypoint':
|
||||
return '▶';
|
||||
case 'sink':
|
||||
return '⚠';
|
||||
case 'gate':
|
||||
return '◆';
|
||||
default:
|
||||
return '○';
|
||||
}
|
||||
}
|
||||
|
||||
/** Format change kind for display */
|
||||
formatChangeKind(kind?: string): string {
|
||||
if (!kind) return '';
|
||||
return kind.charAt(0).toUpperCase() + kind.slice(1);
|
||||
}
|
||||
|
||||
/** Format confidence as percentage */
|
||||
formatConfidence(confidence?: number): string {
|
||||
if (confidence === undefined) return '';
|
||||
return `${Math.round(confidence * 100)}%`;
|
||||
}
|
||||
|
||||
/** Build full node list from path */
|
||||
private buildFullNodeList(path: CompressedPath): PathNode[] {
|
||||
// For now, return compressed representation
|
||||
// Full expansion requires additional data
|
||||
return [path.entrypoint, ...path.keyNodes, path.sink];
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
/**
|
||||
* RiskDriftCardComponent barrel export
|
||||
*/
|
||||
export * from './risk-drift-card.component';
|
||||
@@ -0,0 +1,136 @@
|
||||
<!--
|
||||
RiskDriftCardComponent Template
|
||||
Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
Task: UI-007
|
||||
-->
|
||||
<article class="risk-drift-card" [class.risk-drift-card--compact]="compact()">
|
||||
<!-- Header -->
|
||||
<header class="risk-drift-card__header">
|
||||
<div class="risk-drift-card__title">
|
||||
<h3 class="risk-drift-card__heading">Reachability Drift</h3>
|
||||
@if (showAttestation() && isSigned()) {
|
||||
<span class="risk-drift-card__attestation-badge" title="Signed with DSSE">
|
||||
✓ Attested
|
||||
</span>
|
||||
}
|
||||
</div>
|
||||
<time class="risk-drift-card__time" [attr.datetime]="drift().comparedAt">
|
||||
{{ formatTime(drift().comparedAt) }}
|
||||
</time>
|
||||
</header>
|
||||
|
||||
<!-- Summary metrics -->
|
||||
<div class="risk-drift-card__summary">
|
||||
<!-- Risk trend -->
|
||||
<div class="risk-drift-card__metric risk-drift-card__metric--trend">
|
||||
<span class="risk-drift-card__trend" [class]="trendClass()">
|
||||
<span class="risk-drift-card__trend-icon">{{ trendIcon() }}</span>
|
||||
<span class="risk-drift-card__trend-label">
|
||||
{{ summary().riskTrend | titlecase }}
|
||||
</span>
|
||||
</span>
|
||||
<!-- Net risk delta; .positive/.negative drive the red/green styling -->
<span class="risk-drift-card__delta" [class.positive]="summary().netRiskDelta > 0" [class.negative]="summary().netRiskDelta < 0">
  {{ formatRiskDelta(summary().netRiskDelta) }}
</span>
</div>

<!-- Key stats -->
<!-- Hidden in compact mode -->
@if (!compact()) {
  <div class="risk-drift-card__stats">
    <div class="risk-drift-card__stat">
      <span class="risk-drift-card__stat-value">{{ summary().increasedReachability }}</span>
      <span class="risk-drift-card__stat-label">Increased</span>
    </div>
    <div class="risk-drift-card__stat">
      <span class="risk-drift-card__stat-value">{{ summary().decreasedReachability }}</span>
      <span class="risk-drift-card__stat-label">Decreased</span>
    </div>
    <div class="risk-drift-card__stat">
      <span class="risk-drift-card__stat-value">{{ summary().newSinks }}</span>
      <span class="risk-drift-card__stat-label">New</span>
    </div>
    <div class="risk-drift-card__stat">
      <span class="risk-drift-card__stat-value">{{ summary().removedSinks }}</span>
      <span class="risk-drift-card__stat-label">Removed</span>
    </div>
  </div>
}

<!-- Severity breakdown -->
<!-- One pill per severity level; a pill is rendered only when its count is non-zero -->
<div class="risk-drift-card__severity-bar">
  @if (summary().bySeverity.critical > 0) {
    <span class="risk-drift-card__severity risk-drift-card__severity--critical" [title]="'Critical: ' + summary().bySeverity.critical">
      {{ summary().bySeverity.critical }}
    </span>
  }
  @if (summary().bySeverity.high > 0) {
    <span class="risk-drift-card__severity risk-drift-card__severity--high" [title]="'High: ' + summary().bySeverity.high">
      {{ summary().bySeverity.high }}
    </span>
  }
  @if (summary().bySeverity.medium > 0) {
    <span class="risk-drift-card__severity risk-drift-card__severity--medium" [title]="'Medium: ' + summary().bySeverity.medium">
      {{ summary().bySeverity.medium }}
    </span>
  }
  @if (summary().bySeverity.low > 0) {
    <span class="risk-drift-card__severity risk-drift-card__severity--low" [title]="'Low: ' + summary().bySeverity.low">
      {{ summary().bySeverity.low }}
    </span>
  }
</div>
</div>

<!-- Preview sinks -->
<!-- Top drifted sinks (count capped by the component's maxPreviewSinks input) -->
@if (!compact() && previewSinks().length > 0) {
  <div class="risk-drift-card__preview">
    <h4 class="risk-drift-card__preview-title">Top Drifted Sinks</h4>
    <ul class="risk-drift-card__sink-list">
      @for (sink of previewSinks(); track sink.sink.nodeId) {
        <!-- Keyboard-accessible row: tabindex + keydown.enter mirror the click handler -->
        <li
          class="risk-drift-card__sink-item"
          (click)="onSinkClick(sink)"
          (keydown.enter)="onSinkClick(sink)"
          tabindex="0"
          role="button">
          <span class="risk-drift-card__sink-icon" [class]="getSeverityClass(sink.severity)">
            @if (sink.isRiskIncrease) { ↑ } @else { ↓ }
          </span>
          <div class="risk-drift-card__sink-details">
            <span class="risk-drift-card__sink-name">{{ sink.sink.symbol }}</span>
            @if (sink.cveId) {
              <span class="risk-drift-card__sink-cve">{{ sink.cveId }}</span>
            }
            <!-- Reachability bucket transition, previous → current -->
            <span class="risk-drift-card__sink-bucket">
              {{ getBucketLabel(sink.previousBucket) }} → {{ getBucketLabel(sink.currentBucket) }}
            </span>
          </div>
          <span class="risk-drift-card__sink-delta" [class.positive]="sink.riskDelta > 0" [class.negative]="sink.riskDelta < 0">
            {{ formatRiskDelta(sink.riskDelta) }}
          </span>
        </li>
      }
    </ul>
    <!-- Overflow indicator for sinks beyond the preview cap -->
    @if (additionalSinksCount() > 0) {
      <p class="risk-drift-card__more">
        +{{ additionalSinksCount() }} more sinks
      </p>
    }
  </div>
}

<!-- Actions -->
<footer class="risk-drift-card__footer">
  @if (drift().pullRequestNumber) {
    <span class="risk-drift-card__pr">
      PR #{{ drift().pullRequestNumber }}
    </span>
  }
  <button
    type="button"
    class="risk-drift-card__btn"
    (click)="onViewDetails()">
    View Details
  </button>
</footer>
</article>
|
||||
@@ -0,0 +1,348 @@
|
||||
/**
 * RiskDriftCardComponent Styles
 * Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
 * Task: UI-008
 *
 * BEM-style block: .risk-drift-card with __element / --modifier naming.
 */

// Variables
// Severity palette (critical → low gets progressively less saturated red/amber)
$color-critical: #dc2626;
$color-high: #ea580c;
$color-medium: #d97706;
$color-low: #ca8a04;
$color-info: #6b7280;
// Delta colors are intentionally inverted relative to sign:
$color-positive: #dc2626; // risk increase is bad
$color-negative: #16a34a; // risk decrease is good
$color-border: #e5e7eb;
$color-bg: #ffffff;
$color-bg-hover: #f9fafb;
$color-text: #111827;
$color-text-muted: #6b7280;

.risk-drift-card {
  font-family: var(--font-family-sans, system-ui, sans-serif);
  background: $color-bg;
  border: 1px solid $color-border;
  border-radius: 12px;
  overflow: hidden;
  box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);

  // Compact variant hides the stats grid and the sink preview list
  &--compact {
    .risk-drift-card__preview,
    .risk-drift-card__stats {
      display: none;
    }
  }

  // --- Header: title, attestation badge, timestamp ---
  &__header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 16px 20px;
    border-bottom: 1px solid $color-border;
    background: #fafafa;
  }

  &__title {
    display: flex;
    align-items: center;
    gap: 12px;
  }

  &__heading {
    margin: 0;
    font-size: 16px;
    font-weight: 600;
    color: $color-text;
  }

  // Green pill shown when the drift result carries an attestation digest
  &__attestation-badge {
    font-size: 11px;
    font-weight: 500;
    color: #059669;
    background: #d1fae5;
    padding: 2px 8px;
    border-radius: 9999px;
  }

  &__time {
    font-size: 12px;
    color: $color-text-muted;
  }

  // --- Summary: trend, delta, stats grid, severity pills ---
  &__summary {
    padding: 20px;
    display: flex;
    flex-direction: column;
    gap: 16px;
  }

  &__metric--trend {
    display: flex;
    justify-content: space-between;
    align-items: center;
  }

  // Trend color tracks direction: increasing risk is red, decreasing is green
  &__trend {
    display: flex;
    align-items: center;
    gap: 8px;
    font-weight: 600;
    font-size: 18px;

    &--increasing {
      color: $color-positive;
    }

    &--decreasing {
      color: $color-negative;
    }

    &--stable {
      color: $color-text-muted;
    }
  }

  &__trend-icon {
    font-size: 24px;
  }

  // Net risk delta number; .positive/.negative set by the template bindings
  &__delta {
    font-size: 24px;
    font-weight: 700;
    font-family: var(--font-family-mono, 'SF Mono', Consolas, monospace);

    &.positive {
      color: $color-positive;
    }

    &.negative {
      color: $color-negative;
    }
  }

  // Four-column grid: Increased / Decreased / New / Removed
  &__stats {
    display: grid;
    grid-template-columns: repeat(4, 1fr);
    gap: 16px;
    padding-top: 16px;
    border-top: 1px solid $color-border;
  }

  &__stat {
    display: flex;
    flex-direction: column;
    align-items: center;
    text-align: center;
  }

  &__stat-value {
    font-size: 20px;
    font-weight: 600;
    color: $color-text;
  }

  &__stat-label {
    font-size: 11px;
    color: $color-text-muted;
    text-transform: uppercase;
    letter-spacing: 0.05em;
  }

  &__severity-bar {
    display: flex;
    gap: 8px;
  }

  // Solid-colored count pill, one modifier per severity level
  &__severity {
    font-size: 12px;
    font-weight: 600;
    padding: 4px 10px;
    border-radius: 9999px;
    color: white;

    &--critical {
      background: $color-critical;
    }

    &--high {
      background: $color-high;
    }

    &--medium {
      background: $color-medium;
    }

    &--low {
      background: $color-low;
    }

    &--info {
      background: $color-info;
    }
  }

  // --- Preview: top drifted sinks list ---
  &__preview {
    padding: 0 20px 20px;
  }

  &__preview-title {
    margin: 0 0 12px;
    font-size: 12px;
    font-weight: 600;
    text-transform: uppercase;
    letter-spacing: 0.05em;
    color: $color-text-muted;
  }

  &__sink-list {
    list-style: none;
    margin: 0;
    padding: 0;
    display: flex;
    flex-direction: column;
    gap: 8px;
  }

  // Clickable row; focus-visible ring supports the template's tabindex/role=button
  &__sink-item {
    display: flex;
    align-items: center;
    gap: 12px;
    padding: 12px;
    border: 1px solid $color-border;
    border-radius: 8px;
    cursor: pointer;
    transition: all 0.15s ease;

    &:hover {
      background: $color-bg-hover;
      border-color: #d1d5db;
    }

    &:focus-visible {
      outline: 2px solid #3b82f6;
      outline-offset: 2px;
    }
  }

  // Circular ↑/↓ badge; severity modifiers recolor it with a tinted background
  &__sink-icon {
    flex-shrink: 0;
    width: 28px;
    height: 28px;
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: 14px;
    font-weight: 600;
    border-radius: 50%;
    background: #f3f4f6;
    color: $color-text-muted;

    &.risk-drift-card__severity--critical {
      background: #fee2e2;
      color: $color-critical;
    }

    &.risk-drift-card__severity--high {
      background: #ffedd5;
      color: $color-high;
    }

    &.risk-drift-card__severity--medium {
      background: #fef3c7;
      color: $color-medium;
    }

    &.risk-drift-card__severity--low {
      background: #fef9c3;
      color: $color-low;
    }
  }

  &__sink-details {
    flex: 1;
    display: flex;
    flex-direction: column;
    gap: 2px;
    min-width: 0; // allow the name to ellipsize inside the flex row
  }

  &__sink-name {
    font-weight: 500;
    font-size: 14px;
    font-family: var(--font-family-mono, 'SF Mono', Consolas, monospace);
    color: $color-text;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
  }

  &__sink-cve {
    font-size: 12px;
    color: $color-critical;
    font-weight: 500;
  }

  &__sink-bucket {
    font-size: 11px;
    color: $color-text-muted;
  }

  // Per-sink risk delta; same inverted .positive/.negative scheme as __delta
  &__sink-delta {
    font-size: 14px;
    font-weight: 600;
    font-family: var(--font-family-mono, 'SF Mono', Consolas, monospace);

    &.positive {
      color: $color-positive;
    }

    &.negative {
      color: $color-negative;
    }
  }

  // "+N more sinks" overflow line under the preview list
  &__more {
    margin: 8px 0 0;
    font-size: 12px;
    color: $color-text-muted;
    text-align: center;
  }

  // --- Footer: PR tag and "View Details" action ---
  &__footer {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 12px 20px;
    border-top: 1px solid $color-border;
    background: #fafafa;
  }

  &__pr {
    font-size: 12px;
    color: $color-text-muted;
    background: #f3f4f6;
    padding: 4px 10px;
    border-radius: 4px;
  }

  &__btn {
    padding: 8px 16px;
    font-size: 14px;
    font-weight: 500;
    color: #3b82f6;
    background: transparent;
    border: 1px solid #3b82f6;
    border-radius: 6px;
    cursor: pointer;
    transition: all 0.15s ease;

    &:hover {
      background: #eff6ff;
    }

    &:focus-visible {
      outline: 2px solid #3b82f6;
      outline-offset: 2px;
    }
  }
}
|
||||
@@ -0,0 +1,137 @@
|
||||
/**
|
||||
* RiskDriftCardComponent - Drift Summary Card
|
||||
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
|
||||
* Task: UI-007
|
||||
*/
|
||||
|
||||
import { Component, input, output, computed } from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { DriftResult, DriftSummary, DriftedSink } from '../../models/drift.models';
|
||||
|
||||
/**
|
||||
* Summary card showing reachability drift results.
|
||||
* Displays risk trend, key metrics, and links to details.
|
||||
*
|
||||
* @example
|
||||
* ```html
|
||||
* <app-risk-drift-card
|
||||
* [drift]="driftResult"
|
||||
* [compact]="true"
|
||||
* (viewDetails)="onViewDetails()"
|
||||
* (sinkClick)="onSinkClick($event)">
|
||||
* </app-risk-drift-card>
|
||||
* ```
|
||||
*/
|
||||
@Component({
|
||||
selector: 'app-risk-drift-card',
|
||||
standalone: true,
|
||||
imports: [CommonModule],
|
||||
templateUrl: './risk-drift-card.component.html',
|
||||
styleUrl: './risk-drift-card.component.scss'
|
||||
})
|
||||
export class RiskDriftCardComponent {
|
||||
/** The drift result to display */
|
||||
drift = input.required<DriftResult>();
|
||||
|
||||
/** Compact mode (less detail) */
|
||||
compact = input<boolean>(false);
|
||||
|
||||
/** Whether to show attestation badge */
|
||||
showAttestation = input<boolean>(true);
|
||||
|
||||
/** Maximum sinks to show in preview */
|
||||
maxPreviewSinks = input<number>(3);
|
||||
|
||||
/** Emits when "View Details" is clicked */
|
||||
viewDetails = output<void>();
|
||||
|
||||
/** Emits when a specific sink is clicked */
|
||||
sinkClick = output<DriftedSink>();
|
||||
|
||||
/** Computed: summary from drift */
|
||||
summary = computed<DriftSummary>(() => this.drift().summary);
|
||||
|
||||
/** Computed: is signed with DSSE */
|
||||
isSigned = computed(() => !!this.drift().attestationDigest);
|
||||
|
||||
/** Computed: risk trend icon */
|
||||
trendIcon = computed(() => {
|
||||
const trend = this.summary().riskTrend;
|
||||
switch (trend) {
|
||||
case 'increasing':
|
||||
return '↑';
|
||||
case 'decreasing':
|
||||
return '↓';
|
||||
default:
|
||||
return '→';
|
||||
}
|
||||
});
|
||||
|
||||
/** Computed: risk trend CSS class */
|
||||
trendClass = computed(() => {
|
||||
const trend = this.summary().riskTrend;
|
||||
return `risk-drift-card__trend--${trend}`;
|
||||
});
|
||||
|
||||
/** Computed: top drifted sinks to preview */
|
||||
previewSinks = computed(() => {
|
||||
const sinks = this.drift().driftedSinks;
|
||||
const max = this.maxPreviewSinks();
|
||||
// Sort by risk delta (highest first), then severity
|
||||
return sinks
|
||||
.slice()
|
||||
.sort((a, b) => {
|
||||
const severityOrder = { critical: 0, high: 1, medium: 2, low: 3, info: 4 };
|
||||
const aSev = severityOrder[a.severity ?? 'info'];
|
||||
const bSev = severityOrder[b.severity ?? 'info'];
|
||||
if (aSev !== bSev) return aSev - bSev;
|
||||
return b.riskDelta - a.riskDelta;
|
||||
})
|
||||
.slice(0, max);
|
||||
});
|
||||
|
||||
/** Computed: additional sinks count */
|
||||
additionalSinksCount = computed(() => {
|
||||
return Math.max(0, this.drift().driftedSinks.length - this.maxPreviewSinks());
|
||||
});
|
||||
|
||||
/** Handle view details click */
|
||||
onViewDetails(): void {
|
||||
this.viewDetails.emit();
|
||||
}
|
||||
|
||||
/** Handle sink click */
|
||||
onSinkClick(sink: DriftedSink): void {
|
||||
this.sinkClick.emit(sink);
|
||||
}
|
||||
|
||||
/** Format risk delta */
|
||||
formatRiskDelta(delta: number): string {
|
||||
if (delta > 0) return `+${delta}`;
|
||||
return delta.toString();
|
||||
}
|
||||
|
||||
/** Get severity badge class */
|
||||
getSeverityClass(severity?: string): string {
|
||||
return severity ? `risk-drift-card__severity--${severity}` : '';
|
||||
}
|
||||
|
||||
/** Format timestamp */
|
||||
formatTime(iso: string): string {
|
||||
const date = new Date(iso);
|
||||
return date.toLocaleString();
|
||||
}
|
||||
|
||||
/** Get bucket label */
|
||||
getBucketLabel(bucket: string | null): string {
|
||||
if (!bucket) return 'N/A';
|
||||
const labels: Record<string, string> = {
|
||||
entrypoint: 'Entry Point',
|
||||
direct: 'Direct',
|
||||
runtime: 'Runtime',
|
||||
unknown: 'Unknown',
|
||||
unreachable: 'Unreachable'
|
||||
};
|
||||
return labels[bucket] ?? bucket;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user