wip - advisories and ui extensions
@@ -0,0 +1,325 @@
# SPRINT_20251229_001_000_FE_lineage_smartdiff_overview

## Smart-Diff & SBOM Lineage Graph - Frontend Implementation Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 000 (Index) |
| **MODULEID** | FE (Frontend) |
| **Topic** | Smart-Diff & SBOM Lineage Graph - Complete Frontend Strategy |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/` |
| **Status** | IN PROGRESS |
| **Parent Advisory** | ADVISORY_SBOM_LINEAGE_GRAPH.md (Archived) |

---

## Executive Summary

The SBOM Lineage Graph frontend visualization is **~75% complete**. This document consolidates the remaining implementation work into focused sprints for delivery.

### Existing Infrastructure Assessment

| Area | Completion | Notes |
|------|------------|-------|
| **Lineage Graph SVG** | 95% | Full DAG visualization with lanes, pan/zoom, nodes |
| **Hover Cards** | 85% | Basic info displayed; needs CGS integration |
| **SBOM Diff View** | 90% | 3-column diff exists; needs row expanders |
| **VEX Diff View** | 90% | Status change display; needs reachability gates |
| **Compare Mode** | 85% | Three-pane layout exists; needs explainer timeline |
| **Export Dialog** | 80% | Basic export; needs audit pack format |
| **Proof Tree** | 75% | Merkle tree viz; needs confidence breakdown |
| **Reachability Diff** | 60% | Basic view; needs gate visualization |

### Remaining Gap Analysis

| Gap | Priority | Effort | Sprint |
|-----|----------|--------|--------|
| Explainer Timeline (engine steps) | P0 | 5-7 days | FE_005 |
| Node Diff Table with Expanders | P0 | 4-5 days | FE_006 |
| Pinned Explanations (copy-safe) | P1 | 2-3 days | FE_007 |
| Confidence Breakdown Charts | P1 | 3-4 days | FE_004 (exists) |
| Reachability Gate Diff View | P1 | 3-4 days | FE_008 |
| CGS API Integration | P0 | 3-5 days | FE_003 (exists) |
| Audit Pack Export UI | P2 | 2-3 days | FE_009 |

---

## Sprint Dependency Graph

```
┌──────────────────────────────────────┐
│ SPRINT_001_003_FE_lineage_graph      │
│ (CGS Integration - Minor)            │
└──────────────┬───────────────────────┘
               │
          ┌────────────────────┼────────────────────┐
          │                    │                    │
          ▼                    ▼                    ▼
┌─────────────────────┐ ┌─────────────────────┐ ┌─────────────────────┐
│ FE_005 Explainer    │ │ FE_006 Node Diff    │ │ FE_008 Reachability │
│ Timeline            │ │ Table + Expanders   │ │ Gate Diff           │
└──────────┬──────────┘ └──────────┬──────────┘ └──────────┬──────────┘
           │                       │                       │
           └───────────────────────┼───────────────────────┘
                                   │
                                   ▼
┌──────────────────────────────────────┐
│ FE_007 Pinned Explanations           │
│ (Copy-safe ticket creation)          │
└──────────────┬───────────────────────┘
               │
               ▼
┌──────────────────────────────────────┐
│ FE_009 Audit Pack Export UI          │
│ (Merkle root + formats)              │
└──────────────────────────────────────┘
```

---

## Existing Component Inventory

### Lineage Feature (`src/app/features/lineage/`)

| Component | File | Status | Sprint |
|-----------|------|--------|--------|
| `LineageGraphComponent` | `lineage-graph.component.ts` | Complete | - |
| `LineageNodeComponent` | `lineage-node.component.ts` | Complete | - |
| `LineageEdgeComponent` | `lineage-edge.component.ts` | Complete | - |
| `LineageHoverCardComponent` | `lineage-hover-card.component.ts` | Needs CGS | FE_003 |
| `LineageMiniMapComponent` | `lineage-minimap.component.ts` | Complete | - |
| `LineageControlsComponent` | `lineage-controls.component.ts` | Complete | - |
| `LineageSbomDiffComponent` | `lineage-sbom-diff.component.ts` | Needs expanders | FE_006 |
| `LineageVexDiffComponent` | `lineage-vex-diff.component.ts` | Needs gates | FE_008 |
| `LineageCompareComponent` | `lineage-compare.component.ts` | Needs timeline | FE_005 |
| `LineageExportDialogComponent` | `lineage-export-dialog.component.ts` | Needs audit pack | FE_009 |
| `ReplayHashDisplayComponent` | `replay-hash-display.component.ts` | Complete | - |
| `WhySafePanelComponent` | `why-safe-panel.component.ts` | Complete | - |
| `ProofTreeComponent` | `proof-tree.component.ts` | Needs confidence | FE_004 |
| `LineageGraphContainerComponent` | `lineage-graph-container.component.ts` | Orchestrator | - |

### Compare Feature (`src/app/features/compare/`)

| Component | File | Status | Sprint |
|-----------|------|--------|--------|
| `CompareViewComponent` | `compare-view.component.ts` | Signals-based | - |
| `ThreePaneLayoutComponent` | `three-pane-layout.component.ts` | Complete | - |
| `DeltaSummaryStripComponent` | `delta-summary-strip.component.ts` | Complete | - |
| `TrustIndicatorsComponent` | `trust-indicators.component.ts` | Complete | - |
| `CategoriesPaneComponent` | `categories-pane.component.ts` | Complete | - |
| `ItemsPaneComponent` | `items-pane.component.ts` | Needs expanders | FE_006 |
| `ProofPaneComponent` | `proof-pane.component.ts` | Complete | - |
| `EnvelopeHashesComponent` | `envelope-hashes.component.ts` | Complete | - |
| `GraphMiniMapComponent` | `graph-mini-map.component.ts` | Complete | - |

### Shared Components (`src/app/shared/components/`)

| Component | Status | Notes |
|-----------|--------|-------|
| `DataTableComponent` | Complete | Sortable, selectable, virtual scroll |
| `BadgeComponent` | Complete | Status indicators |
| `TooltipDirective` | Complete | Hover info |
| `ModalComponent` | Complete | Dialog overlays |
| `EmptyStateComponent` | Complete | No data UI |
| `LoadingComponent` | Complete | Skeleton screens |
| `GraphDiffComponent` | Complete | Generic diff visualization |
| `VexTrustChipComponent` | Complete | Trust score badges |
| `ScoreComponent` | Complete | Numeric score display |

---

## API Integration Points

### Required Backend Endpoints (from SbomService)

```typescript
// CGS-enabled lineage APIs (from SPRINT_001_003)
GET /api/v1/lineage/{artifactDigest}
  → LineageGraph { nodes: LineageNode[], edges: LineageEdge[] }

GET /api/v1/lineage/{artifactDigest}/compare?to={targetDigest}
  → LineageDiffResponse { componentDiff, vexDeltas, reachabilityDeltas }

POST /api/v1/lineage/export
  → AuditPackResponse { bundleDigest, merkleRoot, downloadUrl }

// Proof trace APIs (from VexLens)
GET /api/v1/verdicts/{cgsHash}
  → ProofTrace { verdict, factors, evidenceChain, replayHash }

GET /api/v1/verdicts/{cgsHash}/replay
  → ReplayResult { matches: boolean, deviation?: DeviationReport }
```

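For orientation, a minimal TypeScript sketch of the response shapes implied by the annotations above. Only the top-level field names come from the endpoint spec; the nested entry shapes are assumptions until the SbomService contract is finalized:

```typescript
import type { LineageNode, LineageEdge } from '../lineage/models';

// Entry shapes below are illustrative placeholders, not the final contract.
export interface ComponentDiffEntry {
  purl: string;
  change: 'added' | 'removed' | 'changed';
}

export interface VexDeltaEntry {
  cve: string;
  fromStatus: string;
  toStatus: string;
}

export interface ReachabilityDeltaEntry {
  cve: string;
  fromReachable: boolean;
  toReachable: boolean;
}

export interface LineageGraphResponse {
  nodes: LineageNode[];
  edges: LineageEdge[];
}

export interface LineageDiffResponse {
  componentDiff: ComponentDiffEntry[];
  vexDeltas: VexDeltaEntry[];
  reachabilityDeltas: ReachabilityDeltaEntry[];
}

export interface AuditPackResponse {
  bundleDigest: string; // digest of the exported bundle
  merkleRoot: string;   // Merkle root over pack contents
  downloadUrl: string;  // download location for the pack
}
```
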
### TypeScript API Client Services

| Service | Location | Status |
|---------|----------|--------|
| `LineageGraphService` | `features/lineage/services/` | Needs CGS endpoints |
| `LineageExportService` | `features/lineage/services/` | Needs audit pack |
| `CompareService` | `features/compare/services/` | Complete |
| `DeltaVerdictService` | `core/services/` | Needs proof trace |
| `AuditPackService` | `core/services/` | Needs implementation |

---

## Sprint Schedule (Recommended)

| Sprint | Title | Est. Effort | Dependencies |
|--------|-------|-------------|--------------|
| FE_003 | CGS Integration | 3-5 days | BE_001 |
| FE_004 | Proof Studio | 5-7 days | FE_003 |
| FE_005 | Explainer Timeline | 5-7 days | FE_003 |
| FE_006 | Node Diff Table | 4-5 days | FE_003 |
| FE_007 | Pinned Explanations | 2-3 days | FE_005, FE_006 |
| FE_008 | Reachability Gate Diff | 3-4 days | BE_002 (ReachGraph) |
| FE_009 | Audit Pack Export UI | 2-3 days | BE ExportCenter |

**Total Estimated Effort: 24-34 days (~5-7 weeks)**

---

## Design System & Patterns

### Angular 17 Patterns Used

```typescript
// Signals-based state management
readonly nodes = signal<LineageNode[]>([]);
readonly selectedNode = computed(() => this.nodes().find(n => n.selected));

// Standalone components
@Component({
  selector: 'app-explainer-timeline',
  standalone: true,
  imports: [CommonModule, SharedModule],
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExplainerTimelineComponent {
  readonly steps = input<ExplainerStep[]>([]);
  readonly stepClick = output<ExplainerStep>();
}
```

### Styling Conventions

```scss
// Dark mode support
:host {
  --bg-primary: var(--theme-bg-primary, #fff);
  --text-primary: var(--theme-text-primary, #333);
  --accent-color: var(--theme-accent, #007bff);
}

.dark-mode {
  --theme-bg-primary: #1a1a2e;
  --theme-text-primary: #e0e0e0;
}

// Consistent spacing
.panel { padding: var(--spacing-md, 16px); }
.row { margin-bottom: var(--spacing-sm, 8px); }

// Animations
@keyframes fadeIn {
  from { opacity: 0; transform: translateY(-10px); }
  to { opacity: 1; transform: translateY(0); }
}
```

### Component Hierarchy Pattern

```
Container (data loading, state orchestration)
├── Header (title, actions)
├── Body
│   ├── MainView (primary visualization)
│   ├── SidePanel (details, filters)
│   └── BottomBar (status, pagination)
└── Dialogs (modals, exports)
```

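A minimal sketch of this split in the signals style shown above; the component names and demo data are hypothetical, purely to illustrate where state lives:

```typescript
import { ChangeDetectionStrategy, Component, input, signal } from '@angular/core';

// Presentational child (MainView role): pure input → view, no data fetching.
@Component({
  selector: 'app-demo-main-view',
  standalone: true,
  changeDetection: ChangeDetectionStrategy.OnPush,
  template: `<ul>@for (item of items(); track item) { <li>{{ item }}</li> }</ul>`,
})
export class DemoMainViewComponent {
  readonly items = input<string[]>([]);
}

// Container: owns state orchestration and hands data down via inputs.
@Component({
  selector: 'app-demo-container',
  standalone: true,
  imports: [DemoMainViewComponent],
  changeDetection: ChangeDetectionStrategy.OnPush,
  template: `
    <header>Demo</header>
    <app-demo-main-view [items]="items()" />
  `,
})
export class DemoContainerComponent {
  // In a real feature this would be populated from a service call.
  readonly items = signal(['alpha', 'beta']);
}
```
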
---
## Testing Strategy

### Unit Tests

- Component logic with TestBed
- Service mocks with Jasmine spies
- Signal updates and computed values
- Template bindings with ComponentFixture

### Integration Tests

- Component interactions (parent-child)
- Service integration with HttpClientTestingModule
- Router navigation

### E2E Tests

- Critical user flows (graph → hover → compare → export)
- Keyboard navigation
- Mobile responsive layout

### Coverage Target: ≥80%

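As a reference point for the unit-test bullets above, a minimal TestBed spec against the signal-based `ExplainerTimelineComponent` sketched later in this plan (the import path and input name are assumptions tied to that sketch):

```typescript
import { TestBed } from '@angular/core/testing';
import { ExplainerTimelineComponent } from './explainer-timeline.component';

describe('ExplainerTimelineComponent', () => {
  it('sorts steps by sequence', async () => {
    await TestBed.configureTestingModule({
      imports: [ExplainerTimelineComponent], // standalone component
    }).compileComponents();

    const fixture = TestBed.createComponent(ExplainerTimelineComponent);
    // Signal inputs are set through the component ref, not direct assignment.
    fixture.componentRef.setInput('data', {
      findingKey: 'CVE-2024-1234:pkg:npm/lodash@4.17.20',
      verdict: 'not_affected',
      confidenceScore: 0.87,
      steps: [
        { id: 'b', sequence: 2, type: 'verdict', title: 'Verdict', description: '', timestamp: '', durationMs: 2, status: 'success' },
        { id: 'a', sequence: 1, type: 'sbom-ingest', title: 'Ingest', description: '', timestamp: '', durationMs: 2, status: 'success' },
      ],
      totalDurationMs: 4,
      cgsHash: 'sha256:abc',
      isReplay: false,
    });
    fixture.detectChanges();

    const component = fixture.componentInstance;
    expect(component.sortedSteps().map(s => s.id)).toEqual(['a', 'b']);
  });
});
```
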
---

## Accessibility (a11y) Requirements

| Feature | Requirement |
|---------|-------------|
| Keyboard Navigation | Arrow keys for node focus, Enter to select |
| Screen Reader | ARIA labels for nodes, edges, and actions |
| Focus Indicators | Visible focus rings on interactive elements |
| Color Contrast | WCAG AA (4.5:1 for text, 3:1 for graphics) |
| Motion | Respect `prefers-reduced-motion` |

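To make the keyboard row concrete, a minimal sketch of arrow-key node focus for the graph; the focus-order policy (plain index order) and function name are illustrative, not the shipped behavior:

```typescript
// Hypothetical keydown handler for the lineage graph: arrow keys move focus
// between nodes, Enter selects the focused node. Returns the new focus index.
export function handleGraphKeydown(
  event: KeyboardEvent,
  nodeIds: string[],
  focusedIndex: number,
  select: (id: string) => void,
): number {
  switch (event.key) {
    case 'ArrowRight':
    case 'ArrowDown':
      event.preventDefault();
      return Math.min(focusedIndex + 1, nodeIds.length - 1);
    case 'ArrowLeft':
    case 'ArrowUp':
      event.preventDefault();
      return Math.max(focusedIndex - 1, 0);
    case 'Enter':
      event.preventDefault();
      select(nodeIds[focusedIndex]);
      return focusedIndex;
    default:
      return focusedIndex;
  }
}
```
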
---
## File Structure Template

```
src/app/features/<feature>/
├── <feature>.routes.ts
├── components/
│   ├── <component-name>/
│   │   ├── <component-name>.component.ts
│   │   ├── <component-name>.component.html   (if external)
│   │   ├── <component-name>.component.scss   (if external)
│   │   └── <component-name>.component.spec.ts
├── services/
│   ├── <feature>.service.ts
│   └── <feature>.service.spec.ts
├── models/
│   └── <feature>.models.ts
├── directives/
│   └── <directive>.directive.ts
└── __tests__/
    └── <feature>.e2e.spec.ts
```

---

## Related Sprints

| Sprint ID | Title | Status |
|-----------|-------|--------|
| SPRINT_20251229_001_001_BE_cgs_infrastructure | CGS Backend | TODO |
| SPRINT_20251229_001_002_BE_vex_delta | VEX Delta Backend | TODO |
| SPRINT_20251229_001_003_FE_lineage_graph | CGS Integration | TODO |
| SPRINT_20251229_001_004_FE_proof_studio | Proof Studio | TODO |
| SPRINT_20251229_001_005_FE_explainer_timeline | Explainer Timeline | TODO |
| SPRINT_20251229_001_006_FE_node_diff_table | Node Diff Table | TODO |
| SPRINT_20251229_001_007_FE_pinned_explanations | Pinned Explanations | TODO |
| SPRINT_20251229_001_008_FE_reachability_gate_diff | Reachability Diff | TODO |
| SPRINT_20251229_001_009_FE_audit_pack_export | Audit Pack Export | TODO |

---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Overview created | Consolidated from product advisory analysis |
| 2025-12-29 | Gap analysis completed | 75% existing, 25% remaining |
| 2025-12-29 | Sprint schedule defined | 5-7 weeks estimated |

docs/implplan/SPRINT_20251229_001_001_BE_cgs_infrastructure.md (new file)
@@ -0,0 +1,153 @@
# SPRINT_20251229_001_001_BE_cgs_infrastructure

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 001 |
| **MODULEID** | BE (Backend) |
| **Topic** | CGS (Canonical Graph Signature) Infrastructure |
| **Working Directory** | `src/` (cross-cutting) |
| **Status** | TODO |

## Context

This sprint implements the unified Verdict Builder service that composes existing determinism infrastructure into a single cohesive API. The architecture already exists (~85% complete per CONSOLIDATED - Deterministic Evidence and Verdict Architecture.md), but lacks the orchestration layer.

## Related Documentation

- `docs/product-advisories/archived/CONSOLIDATED - Deterministic Evidence and Verdict Architecture.md`
- `docs/modules/attestor/architecture.md` (ProofChain section)
- `docs/modules/policy/architecture.md` (Determinism section)
- `docs/modules/replay/architecture.md`

## Prerequisites

- [ ] Read `docs/modules/attestor/architecture.md` (ProofChain/Identifiers)
- [ ] Read `docs/modules/policy/architecture.md` (Section 6.1 - VEX decision attestation)
- [ ] Understand existing `StellaOps.Attestor.ProofChain` library

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| CGS-001 | Create `IVerdictBuilder` interface | TODO | | Primary abstraction |
| CGS-002 | Implement `VerdictBuilderService` | TODO | | Compose Sbom/VEX/Policy/Attestor |
| CGS-003 | Add `POST /api/v1/verdicts/build` endpoint | TODO | | Accept EvidencePack, return CGS |
| CGS-004 | Add `GET /api/v1/verdicts/{cgs_hash}` endpoint | TODO | | Replay/retrieval |
| CGS-005 | Add `POST /api/v1/verdicts/diff` endpoint | TODO | | Delta between two CGS hashes |
| CGS-006 | Implement `PolicyLock` generator | TODO | | Freeze rule versions |
| CGS-007 | Wire Fulcio keyless signing | TODO | | Configure Sigstore integration |
| CGS-008 | Add cross-platform determinism tests | TODO | | Ubuntu/Alpine/Debian runners |
| CGS-009 | Add golden file tests for CGS hash stability | TODO | | Same input → same hash |

## Technical Design

### VerdictBuilder Interface

```csharp
// Location: src/__Libraries/StellaOps.Verdict/IVerdictBuilder.cs
public interface IVerdictBuilder
{
    /// <summary>
    /// Build a deterministic verdict from an evidence pack.
    /// Same inputs always produce an identical CGS hash and verdict.
    /// </summary>
    ValueTask<VerdictResult> BuildAsync(
        EvidencePack evidence,
        PolicyLock policyLock,
        CancellationToken ct);

    /// <summary>
    /// Replay a verdict from a stored CGS hash.
    /// Returns the identical result, or null when the hash is unknown
    /// (surfaced as 404 by the API).
    /// </summary>
    ValueTask<VerdictResult?> ReplayAsync(
        string cgsHash,
        CancellationToken ct);

    /// <summary>
    /// Compute the delta between two verdicts.
    /// </summary>
    ValueTask<VerdictDelta> DiffAsync(
        string fromCgs,
        string toCgs,
        CancellationToken ct);
}

public sealed record VerdictResult(
    string CgsHash,
    VerdictPayload Verdict,
    DsseEnvelope Dsse,
    ProofTrace Trace,
    DateTimeOffset ComputedAt);

public sealed record EvidencePack(
    string SbomCanonJson,
    IReadOnlyList<string> VexCanonJson,
    string? ReachabilityGraphJson,
    string FeedSnapshotDigest);

public sealed record PolicyLock(
    string SchemaVersion,
    string PolicyVersion,
    IReadOnlyDictionary<string, string> RuleHashes,
    string EngineVersion,
    DateTimeOffset GeneratedAt);
```

### API Endpoints

```
POST /api/v1/verdicts/build
  Request:  { evidence_pack, policy_lock }
  Response: { cgs_hash, verdict, dsse, proof_trace }

GET /api/v1/verdicts/{cgs_hash}
  Response: { cgs_hash, verdict, dsse, proof_trace } or 404

POST /api/v1/verdicts/diff
  Request:  { from_cgs, to_cgs }
  Response: { changes[], added_vulns[], removed_vulns[], status_changes[] }
```

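For a concrete feel of the contract, a hedged TypeScript consumer sketch. The snake_case field names follow the request/response shapes above; the helper names and `unknown`-typed payloads are assumptions until the endpoints ship:

```typescript
// Sketch of calling the verdict endpoints with fetch; not a published client.
interface BuildVerdictRequest {
  evidence_pack: unknown; // canonical SBOM/VEX/reachability JSON + feed digest
  policy_lock: unknown;   // frozen rule versions and engine version
}

interface BuildVerdictResponse {
  cgs_hash: string;
  verdict: unknown;
  dsse: unknown;
  proof_trace: unknown;
}

async function buildVerdict(req: BuildVerdictRequest): Promise<BuildVerdictResponse> {
  const res = await fetch('/api/v1/verdicts/build', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(req),
  });
  if (!res.ok) throw new Error(`verdict build failed: ${res.status}`);
  return res.json();
}

// Replay: the same CGS hash must return the identical verdict, or 404.
async function replayVerdict(cgsHash: string): Promise<BuildVerdictResponse | null> {
  const res = await fetch(`/api/v1/verdicts/${encodeURIComponent(cgsHash)}`);
  if (res.status === 404) return null;
  if (!res.ok) throw new Error(`replay failed: ${res.status}`);
  return res.json();
}
```
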
### CGS Hash Computation

```csharp
// Reuse the existing Merkle tree builder
var builder = new DeterministicMerkleTreeBuilder();

// Leaves are content-addressed evidence components; VEX digests are
// ordinally sorted so the leaf order is deterministic.
string[] leaves =
[
    sbomDigest,
    .. vexDigests.OrderBy(d => d, StringComparer.Ordinal),
    reachabilityDigest,
    policyLock.ToCanonicalHash(),
];

var cgsHash = builder.Build(leaves).RootHash;
```

## Success Criteria

- [ ] `POST /verdicts/build` returns deterministic CGS hash
- [ ] Same inputs on different machines produce identical CGS
- [ ] DSSE envelope verifies with Sigstore
- [ ] Golden file tests pass on Ubuntu/Alpine/Debian
- [ ] Replay endpoint returns identical verdict

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use existing ProofChain Merkle builder vs new impl | PENDING |
| DR-002 | Fulcio keyless requires OIDC - air-gap fallback? | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Initial planning |

docs/implplan/SPRINT_20251229_001_002_BE_vex_delta.md (new file)
@@ -0,0 +1,189 @@
# SPRINT_20251229_001_002_BE_vex_delta

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 002 |
| **MODULEID** | BE (Backend) |
| **Topic** | VEX Delta Persistence and SBOM-Verdict Linking |
| **Working Directory** | `src/Excititor/`, `src/SbomService/`, `src/VexLens/` |
| **Status** | TODO |

## Context

The VEX delta schema is designed in `ADVISORY_SBOM_LINEAGE_GRAPH.md` but not yet migrated to PostgreSQL. This sprint implements:

1. VEX delta table for tracking status transitions (affected → not_affected)
2. SBOM-verdict link table for joining scan results to VEX consensus
3. PostgreSQL backend for VexLens consensus projections (currently in-memory)

## Related Documentation

- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Gap Analysis section)
- `docs/modules/sbomservice/lineage/architecture.md`
- `docs/modules/vex-lens/architecture.md`
- `docs/modules/excititor/architecture.md`

## Prerequisites

- [ ] Read VEX delta schema from ADVISORY_SBOM_LINEAGE_GRAPH.md
- [ ] Understand VexLens in-memory store limitations
- [ ] Review existing `OpenVexStatementMerger` and `MergeTrace`

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| VEX-001 | Create migration: `vex.deltas` table | TODO | | From advisory schema |
| VEX-002 | Create migration: `sbom.verdict_links` table | TODO | | Join SBOM versions to verdicts |
| VEX-003 | Create migration: `vex.consensus_projections` table | TODO | | Replace in-memory VexLens store |
| VEX-004 | Implement `IVexDeltaRepository` | TODO | | CRUD for delta records |
| VEX-005 | Implement `ISbomVerdictLinkRepository` | TODO | | Link SBOM → consensus |
| VEX-006 | Implement `IConsensusProjectionRepository` | TODO | | PostgreSQL backend for VexLens |
| VEX-007 | Wire merge trace persistence | TODO | | Save trace to delta record |
| VEX-008 | Add `VexDeltaAttestation` predicate type | TODO | | DSSE for delta transitions |
| VEX-009 | Update VexLens to use PostgreSQL | TODO | | Replace `InMemoryStore` |
| VEX-010 | Add indexes for delta queries | TODO | | (from_digest, to_digest, cve) |

## Database Migrations

### Migration: 20251229000001_AddVexDeltas.sql

```sql
-- VEX status transition records
CREATE TABLE vex.deltas (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    from_artifact_digest TEXT NOT NULL,
    to_artifact_digest TEXT NOT NULL,
    cve TEXT NOT NULL,
    from_status TEXT NOT NULL CHECK (from_status IN ('affected', 'not_affected', 'fixed', 'under_investigation', 'unknown')),
    to_status TEXT NOT NULL CHECK (to_status IN ('affected', 'not_affected', 'fixed', 'under_investigation', 'unknown')),
    rationale JSONB NOT NULL DEFAULT '{}',
    replay_hash TEXT NOT NULL,
    attestation_digest TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    CONSTRAINT vex_deltas_unique UNIQUE (tenant_id, from_artifact_digest, to_artifact_digest, cve)
);

-- Indexes for common queries
CREATE INDEX idx_vex_deltas_to ON vex.deltas(to_artifact_digest, tenant_id);
CREATE INDEX idx_vex_deltas_cve ON vex.deltas(cve, tenant_id);
CREATE INDEX idx_vex_deltas_created ON vex.deltas(tenant_id, created_at DESC);

-- RLS policy
ALTER TABLE vex.deltas ENABLE ROW LEVEL SECURITY;
CREATE POLICY vex_deltas_tenant_isolation ON vex.deltas
    FOR ALL USING (tenant_id = vex_app.require_current_tenant()::UUID);
```

### Migration: 20251229000002_AddSbomVerdictLinks.sql

```sql
-- Link SBOM versions to VEX verdicts
CREATE TABLE sbom.verdict_links (
    sbom_version_id UUID NOT NULL,
    cve TEXT NOT NULL,
    consensus_projection_id UUID NOT NULL,
    verdict_status TEXT NOT NULL,
    confidence_score DECIMAL(5,4) NOT NULL CHECK (confidence_score >= 0 AND confidence_score <= 1),
    tenant_id UUID NOT NULL,
    linked_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),

    PRIMARY KEY (sbom_version_id, cve, tenant_id)
);

CREATE INDEX idx_verdict_links_cve ON sbom.verdict_links(cve, tenant_id);
CREATE INDEX idx_verdict_links_projection ON sbom.verdict_links(consensus_projection_id);

-- RLS policy
ALTER TABLE sbom.verdict_links ENABLE ROW LEVEL SECURITY;
CREATE POLICY verdict_links_tenant_isolation ON sbom.verdict_links
    FOR ALL USING (tenant_id = sbom_app.require_current_tenant()::UUID);
```

### Migration: 20251229000003_AddConsensusProjections.sql

```sql
-- Persistent VexLens consensus (replaces in-memory store)
CREATE TABLE vex.consensus_projections (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id UUID NOT NULL,
    vulnerability_id TEXT NOT NULL,
    product_key TEXT NOT NULL,
    status TEXT NOT NULL,
    confidence_score DECIMAL(5,4) NOT NULL,
    outcome TEXT NOT NULL,
    statement_count INT NOT NULL,
    conflict_count INT NOT NULL,
    merge_trace JSONB,
    computed_at TIMESTAMPTZ NOT NULL,
    stored_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    previous_projection_id UUID REFERENCES vex.consensus_projections(id),
    status_changed BOOLEAN NOT NULL DEFAULT FALSE,

    CONSTRAINT consensus_unique UNIQUE (tenant_id, vulnerability_id, product_key, computed_at)
);

CREATE INDEX idx_consensus_vuln ON vex.consensus_projections(vulnerability_id, tenant_id);
CREATE INDEX idx_consensus_product ON vex.consensus_projections(product_key, tenant_id);
CREATE INDEX idx_consensus_computed ON vex.consensus_projections(tenant_id, computed_at DESC);

-- RLS policy
ALTER TABLE vex.consensus_projections ENABLE ROW LEVEL SECURITY;
CREATE POLICY consensus_tenant_isolation ON vex.consensus_projections
    FOR ALL USING (tenant_id = vex_app.require_current_tenant()::UUID);
```

## Repository Interfaces

```csharp
// Location: src/Excititor/__Libraries/StellaOps.Excititor.Core/Repositories/IVexDeltaRepository.cs
public interface IVexDeltaRepository
{
    ValueTask<VexDelta> AddAsync(VexDelta delta, CancellationToken ct);

    ValueTask<IReadOnlyList<VexDelta>> GetDeltasAsync(
        string fromDigest, string toDigest, Guid tenantId, CancellationToken ct);

    ValueTask<IReadOnlyList<VexDelta>> GetDeltasByCveAsync(
        string cve, Guid tenantId, int limit, CancellationToken ct);
}

public sealed record VexDelta(
    Guid Id,
    Guid TenantId,
    string FromArtifactDigest,
    string ToArtifactDigest,
    string Cve,
    VexStatus FromStatus,
    VexStatus ToStatus,
    VexDeltaRationale Rationale,
    string ReplayHash,
    string? AttestationDigest,
    DateTimeOffset CreatedAt);
```

## Success Criteria

- [ ] All three migrations apply cleanly on fresh DB
- [ ] VexLens stores projections in PostgreSQL
- [ ] Delta records created on status transitions
- [ ] SBOM-verdict links queryable by CVE
- [ ] RLS enforces tenant isolation

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Keep in-memory VexLens cache for hot path? | PENDING |
| DR-002 | Backfill existing scans with verdict links? | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Initial planning |

docs/implplan/SPRINT_20251229_001_003_FE_lineage_graph.md (new file)
@@ -0,0 +1,201 @@
# SPRINT_20251229_001_003_FE_lineage_graph

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 003 |
| **MODULEID** | FE (Frontend) |
| **Topic** | CGS Integration & Minor UI Enhancements |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/` |
| **Status** | TODO |
| **Revised Scope** | MINOR - Core visualization already exists |

## Context

**REVISION:** Exploration revealed that the lineage graph visualization is **already ~85% implemented**:

- 41 TypeScript files in `features/lineage/`
- 31 visualization components including graph, hover cards, diff views
- Full compare mode with three-pane layout
- Proof tree and replay hash display components exist

This sprint is now scoped to **minor integration work** with the new CGS backend APIs.

## Related Documentation

- `docs/modules/sbomservice/lineage/architecture.md` (API spec)
- `docs/modules/ui/architecture.md`
- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md`
- Existing compare feature: `src/Web/StellaOps.Web/src/app/features/compare/`

## Prerequisites

- [ ] Read lineage API spec from sbomservice/lineage/architecture.md
- [ ] Review existing compare-view component
- [ ] Understand Angular 17 signals/observables patterns in codebase

## Existing Components (Already Implemented)

| Component | Location | Status |
|-----------|----------|--------|
| `lineage-graph.component` | `components/lineage-graph/` | ✅ Complete - SVG lane layout |
| `lineage-node.component` | `components/lineage-node/` | ✅ Complete - Badges, shapes |
| `lineage-edge.component` | `components/lineage-edge/` | ✅ Complete - Bezier curves |
| `lineage-hover-card.component` | `components/lineage-hover-card/` | ✅ Complete |
| `lineage-sbom-diff.component` | `components/lineage-sbom-diff/` | ✅ Complete - 3-column |
| `lineage-vex-diff.component` | `components/lineage-vex-diff/` | ✅ Complete |
| `lineage-compare.component` | `components/lineage-compare/` | ✅ Complete |
| `lineage-minimap.component` | `components/lineage-minimap/` | ✅ Complete |
| `lineage-controls.component` | `components/lineage-controls/` | ✅ Complete |
| `proof-tree.component` | `shared/components/proof-tree/` | ✅ Complete |
| `replay-hash-display.component` | `components/replay-hash-display/` | ✅ Complete |
| `export-dialog.component` | `components/export-dialog/` | ✅ Complete |
| `graph-diff.component` | `shared/components/graph-diff/` | ✅ Complete |

## Delivery Tracker (Revised - Minor Tasks)

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| LG-001 | Wire `lineage-graph.service` to new CGS APIs | TODO | | Add `buildVerdict()`, `replayVerdict()` |
| LG-002 | Add CGS hash display to `lineage-node.component` | TODO | | Show `cgs_hash` in tooltip |
| LG-003 | Wire `proof-tree.component` to verdict traces | TODO | | Consume `ProofTrace` from CGS API |
| LG-004 | Add "Replay Verdict" button to hover card | TODO | | Calls `GET /verdicts/{cgs}` |
| LG-005 | Display confidence factor chips | TODO | | Add to existing node badges |
| LG-006 | Unit tests for new CGS integration | TODO | | |

**Estimated Effort: 3-5 days (down from 10+ days)**

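A sketch of what LG-001 and LG-004 could add to the lineage graph service; the service/method names and the `VerdictResponse` shape are assumptions pending the CGS API from SPRINT_001_001:

```typescript
import { HttpClient } from '@angular/common/http';
import { Injectable, inject } from '@angular/core';
import { Observable } from 'rxjs';

// Assumed response shape; the real contract comes from the CGS backend sprint.
export interface VerdictResponse {
  cgsHash: string;
  verdict: string;
  proofTrace: unknown;
}

@Injectable({ providedIn: 'root' })
export class LineageGraphCgsService {
  private readonly http = inject(HttpClient);

  // LG-001: build a verdict for an artifact's evidence pack.
  buildVerdict(evidencePack: unknown, policyLock: unknown): Observable<VerdictResponse> {
    return this.http.post<VerdictResponse>('/api/v1/verdicts/build', {
      evidencePack,
      policyLock,
    });
  }

  // LG-004: "Replay Verdict" button → GET /verdicts/{cgs}.
  replayVerdict(cgsHash: string): Observable<VerdictResponse> {
    return this.http.get<VerdictResponse>(
      `/api/v1/verdicts/${encodeURIComponent(cgsHash)}`);
  }
}
```
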
## Component Architecture

```
src/app/features/lineage/
├── lineage.module.ts
├── lineage-routing.module.ts
├── services/
│   ├── lineage.service.ts          # API client
│   └── lineage-graph.service.ts    # DAG layout computation
├── components/
│   ├── lineage-graph/
│   │   ├── lineage-graph.component.ts
│   │   ├── lineage-graph.component.html
│   │   └── lineage-graph.component.scss
│   ├── lineage-node/
│   │   ├── lineage-node.component.ts
│   │   └── ...
│   ├── lineage-edge/
│   │   └── ...
│   ├── lineage-hover-card/
│   │   └── ...
│   └── lineage-diff-popup/
│       └── ...
└── models/
    ├── lineage-node.model.ts
    └── lineage-edge.model.ts
```

## UI Mockup

```
┌────────────────────────────────────────────────────────────────────────┐
│ Lineage Graph: registry/app:v1.2                        [Export Pack]  │
├────────────────────────────────────────────────────────────────────────┤
│                                                                        │
│   ┌─────────────┐     ┌─────────────┐     ┌─────────────┐              │
│   │    v1.0     │────▶│    v1.1     │────▶│    v1.2     │←─ Current    │
│   │ 2025-12-01  │     │ 2025-12-15  │     │ 2025-12-28  │              │
│   │ 🔴 5 vulns  │     │ 🟡 3 vulns  │     │ 🟢 0 vulns  │              │
│   │ ✓ signed    │     │ ✓ signed    │     │ ✓ signed    │              │
│   └─────────────┘     └─────────────┘     └─────────────┘              │
│          │                                                             │
│          │ base                                                        │
│          ▼                                                             │
│   ┌─────────────┐                                                      │
│   │ alpine:3.19 │                                                      │
│   │ (base img)  │                                                      │
│   └─────────────┘                                                      │
│                                                                        │
│ ┌─────────────────────────────────────────────────────────────────────┐│
│ │ Hover Card: v1.1 → v1.2                                             ││
│ │ ┌─ SBOM Diff ─────────────────────────────────────────────────────┐ ││
│ │ │ + pkg:npm/lodash@4.17.21 (added)                                │ ││
│ │ │ - pkg:npm/lodash@4.17.20 (removed)                              │ ││
│ │ │ ~ pkg:npm/axios 1.5.0 → 1.6.0                                   │ ││
│ │ └─────────────────────────────────────────────────────────────────┘ ││
│ │ ┌─ VEX Changes ───────────────────────────────────────────────────┐ ││
│ │ │ CVE-2024-1234: affected → not_affected (component removed)      │ ││
│ │ │ CVE-2024-5678: reachable → unreachable (gates added)            │ ││
│ │ └─────────────────────────────────────────────────────────────────┘ ││
│ │ Replay Hash: sha256:abc123...               [Replay] [View Proof]  ││
│ └─────────────────────────────────────────────────────────────────────┘│
└────────────────────────────────────────────────────────────────────────┘
```

## Badge Definitions

| Badge | Condition | Color |
|-------|-----------|-------|
| 🔴 N vulns | Critical/High findings > 0 | Red |
| 🟡 N vulns | Medium findings, no Critical/High | Yellow |
| 🟢 0 vulns | No findings | Green |
| ✓ signed | Valid DSSE signature | Green |
| ✗ unsigned | No signature or invalid | Red |
| ⟳ replay | Has replay hash | Blue |

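A small sketch of the vulnerability-badge mapping in the table above; the `FindingCounts` shape is an assumption, and the table leaves low-only findings unspecified (treated as green here):

```typescript
// Assumed severity counts per node; real data comes from the lineage API.
export interface FindingCounts {
  critical: number;
  high: number;
  medium: number;
  low: number;
}

export type VulnBadge = 'red' | 'yellow' | 'green';

// Mirrors the table: Critical/High → red; Medium only → yellow; none → green.
export function vulnBadge(c: FindingCounts): VulnBadge {
  if (c.critical + c.high > 0) return 'red';
  if (c.medium > 0) return 'yellow';
  return 'green'; // low-only is unspecified in the table; assumed green here
}
```
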
## API Integration

```typescript
// lineage.service.ts
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';

@Injectable({ providedIn: 'root' })
export class LineageService {
  constructor(private http: HttpClient) {}

  getLineage(artifactDigest: string, options?: LineageQueryOptions): Observable<LineageGraphResponse> {
    return this.http.get<LineageGraphResponse>(`/api/v1/lineage/${encodeURIComponent(artifactDigest)}`, {
      params: {
        maxDepth: options?.maxDepth ?? 10,
        includeVerdicts: options?.includeVerdicts ?? true,
        includeBadges: options?.includeBadges ?? true
      }
    });
  }

  getDiff(from: string, to: string): Observable<LineageDiffResponse> {
    return this.http.get<LineageDiffResponse>('/api/v1/lineage/diff', {
      params: { from, to }
    });
  }

  exportPack(digests: string[]): Observable<ExportResponse> {
    return this.http.post<ExportResponse>('/api/v1/lineage/export', {
      artifactDigests: digests,
      includeAttestations: true,
      sign: true
    });
  }
}
```

## Success Criteria

- [ ] Graph renders DAG with nodes and edges
- [ ] Hover shows SBOM/VEX diff summary
- [ ] Click opens full diff view
- [ ] Export downloads valid audit pack
- [ ] Responsive layout works on tablet/mobile
- [ ] Keyboard navigation functional
- [ ] Tests pass with ≥80% coverage

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use d3.js vs custom SVG? | PENDING - recommend dagre-d3 |
| DR-002 | Lazy load large graphs (>50 nodes)? | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Initial planning |

docs/implplan/SPRINT_20251229_001_004_FE_proof_studio.md (new file)
@@ -0,0 +1,203 @@
# SPRINT_20251229_001_004_FE_proof_studio

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Proof Studio - Confidence Breakdown & What-If |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/` |
| **Status** | TODO |
| **Revised Scope** | MEDIUM - Core proof visualization exists, adding new features |

## Context

**REVISION:** Exploration revealed significant existing infrastructure:

- `proof-tree.component` - Merkle tree visualization exists
- `why-safe-panel.component` - VEX justification exists
- `trust-indicators.component` - Signature/policy status exists
- `replay-hash-display.component` - Determinism indicator exists

This sprint focuses on **new features** not yet implemented:

1. Confidence score breakdown with factor visualization
2. What-if evidence slider for simulation
3. Integration with new CGS VerdictBuilder API

## Related Documentation

- `docs/product-advisories/archived/CONSOLIDATED - Deterministic Evidence and Verdict Architecture.md`
- `docs/modules/policy/architecture.md` (Proof Trace section)
- Existing `ProofTreeComponent` in UI

## Prerequisites

- [ ] Read proof trace format from Policy architecture
- [ ] Review existing triage workspace components
- [ ] Understand confidence score computation

## Existing Components (Already Implemented)

| Component | Location | Status |
|-----------|----------|--------|
| `proof-tree.component` | `shared/components/` | ✅ Complete - Merkle tree viz |
| `why-safe-panel.component` | `features/lineage/components/` | ✅ Complete - VEX justification |
| `trust-indicators.component` | `features/compare/components/` | ✅ Complete - Signature status |
| `replay-hash-display.component` | `features/lineage/components/` | ✅ Complete - Hash display |
| `export-dialog.component` | `features/lineage/components/` | ✅ Complete - Audit export |
| `envelope-hashes.component` | `features/compare/components/` | ✅ Complete - Attestation display |

## Delivery Tracker (Revised - New Features Only)

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| PS-001 | Implement `ConfidenceBreakdownComponent` | TODO | | NEW - Score factor bar chart |
| PS-002 | Implement `ConfidenceFactorChip` | TODO | | NEW - Factor badges |
| PS-003 | Implement `WhatIfSliderComponent` | TODO | | NEW - Evidence simulation |
| PS-004 | Wire proof-tree to CGS proof traces | TODO | | Integration with new API |
| PS-005 | Add confidence breakdown to verdict card | TODO | | Template update |
| PS-006 | Unit tests for new components | TODO | | |

**Estimated Effort: 5-7 days (down from 8+ days)**

## Component Architecture

```
src/app/features/proof-studio/
├── proof-studio.module.ts
├── proof-studio-routing.module.ts
├── services/
│   └── proof-studio.service.ts
├── components/
│   ├── proof-tree/                  # Extended existing
│   │   └── proof-tree.component.ts
│   ├── confidence-breakdown/
│   │   ├── confidence-breakdown.component.ts
│   │   └── ...
│   ├── confidence-factor/
│   │   └── confidence-factor.component.ts
│   ├── what-if-slider/
│   │   └── what-if-slider.component.ts
│   ├── verdict-timeline/
│   │   └── verdict-timeline.component.ts
│   └── audit-pack-dialog/
│       └── audit-pack-dialog.component.ts
└── models/
    ├── proof-trace.model.ts
    └── confidence-factor.model.ts
```

## UI Mockup - Confidence Breakdown

```
┌────────────────────────────────────────────────────────────────────────┐
│ Verdict: CVE-2024-1234 → NOT AFFECTED                                  │
│ Confidence: 0.87                                        [Replay]  [⤓]  │
├────────────────────────────────────────────────────────────────────────┤
│                                                                        │
│ ┌─ Confidence Breakdown ─────────────────────────────────────────────┐│
│ │                                                                    ││
│ │  ████████████████████████████████████████████████░░░░░░░  87%     ││
│ │                                                                    ││
│ │  ┌─────────────────────────────────────────────────────────────┐  ││
│ │  │ Reachability  ███████████████████░░░░░ 0.80 │ Unreachable   │  ││
│ │  │ VEX Evidence  ██████████████████████░░ 0.90 │ 3 sources     │  ││
│ │  │ Policy Rules  ███████████████████████░ 0.95 │ v2.1.3        │  ││
│ │  │ Provenance    ███████████████████░░░░░ 0.80 │ Signed        │  ││
│ │  └─────────────────────────────────────────────────────────────┘  ││
│ │                                                                    ││
│ │  Combined: (0.80 × 0.25) + (0.90 × 0.30) + (0.95 × 0.25) +         ││
│ │            (0.80 × 0.20) ≈ 0.87                                    ││
│ └────────────────────────────────────────────────────────────────────┘│
│                                                                        │
│ ┌─ Proof Tree ───────────────────────────────────────────────────────┐│
│ │ 📋 Finding: CVE-2024-1234 in pkg:npm/lodash@4.17.20                ││
│ │ ├─ 🔍 Reachability Analysis                                        ││
│ │ │   └─ ✗ No call path to vulnerable function _.template()          ││
│ │ │       └─ Entry: main.js:42 → utils.js:15 → ✗ lodash (blocked)    ││
│ │ ├─ 📝 VEX Sources                                                  ││
│ │ │   ├─ ✓ Vendor VEX: not_affected (0.90 trust)                     ││
│ │ │   ├─ ✓ Community: not_affected (0.70 trust)                      ││
│ │ │   └─ ~ NIST: under_investigation (0.60 trust)                    ││
│ │ ├─ ⚖️ Policy: reach-gate-v2                                        ││
│ │ │   └─ ✓ Rule matched: "unreachable_vuln → not_affected"           ││
│ │ └─ 🔐 Attestation                                                  ││
│ │     └─ ✓ DSSE signed, Rekor logged (index: 123456)                 ││
│ └────────────────────────────────────────────────────────────────────┘│
│                                                                        │
│ ┌─ What-If Simulation ───────────────────────────────────────────────┐│
│ │ Remove evidence: [VEX: Vendor] [VEX: Community] [Reachability]     ││
│ │ ──────────────────────────────────●─────────────────────────────── ││
│ │ Simulated confidence: 0.52 (→ UNDER_INVESTIGATION)                 ││
│ └────────────────────────────────────────────────────────────────────┘│
└────────────────────────────────────────────────────────────────────────┘
```

## Data Models

```typescript
// proof-trace.model.ts
export interface ProofTrace {
  findingKey: FindingKey;
  verdict: VerdictStatus;
  confidenceScore: number;
  factors: ConfidenceFactor[];
  ruleHits: RuleHit[];
  evidenceChain: EvidenceNode[];
  cgsHash: string;
  dsseStatus: 'valid' | 'invalid' | 'unsigned';
  rekorIndex?: number;
}

export interface ConfidenceFactor {
  id: string;
  name: string;
  weight: number;
  score: number;
  contribution: number; // weight × score
  source: string;
  details: Record<string, unknown>;
}

export interface RuleHit {
  ruleId: string;
  ruleName: string;
  version: string;
  matchedFacts: string[];
  decision: string;
  timestamp: string;
}

export interface EvidenceNode {
  id: string;
  type: 'sbom' | 'vex' | 'reachability' | 'attestation';
  digest: string;
  source: string;
  confidence: number;
  children?: EvidenceNode[];
}
```

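To illustrate what the What-If slider simulates, a client-side sketch that recomputes the weighted score with selected factors removed. Per DR-001 the authoritative computation would likely stay server-side; the renormalization policy and import path here are assumptions:

```typescript
import type { ConfidenceFactor } from './models/confidence-factor.model';

// Recompute the combined score excluding some factors. The remaining weights
// are renormalized so they still sum to 1 — an illustrative choice, not the
// engine's documented behavior.
export function simulateConfidence(
  factors: ConfidenceFactor[],
  removedIds: ReadonlySet<string>,
): number {
  const kept = factors.filter(f => !removedIds.has(f.id));
  const totalWeight = kept.reduce((sum, f) => sum + f.weight, 0);
  if (totalWeight === 0) return 0;
  const weighted = kept.reduce((sum, f) => sum + f.weight * f.score, 0);
  return weighted / totalWeight;
}
```
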
## Success Criteria

- [ ] Proof tree renders complete evidence chain
- [ ] Confidence breakdown shows factor contributions
- [ ] What-if slider simulates score changes
- [ ] Timeline shows verdict evolution
- [ ] Audit pack downloads complete evidence
- [ ] Replay action verifies determinism

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | What-if computation: client or server? | PENDING - recommend server |
| DR-002 | Timeline depth limit? | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Initial planning |

docs/implplan/SPRINT_20251229_001_005_FE_explainer_timeline.md (new file)
@@ -0,0 +1,687 @@
# SPRINT_20251229_001_005_FE_explainer_timeline

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 005 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Explainer Timeline - Engine Step Visualization |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/components/explainer-timeline/` |
| **Status** | TODO |
| **Priority** | P0 - Core UX Deliverable |
| **Estimated Effort** | 5-7 days |

---

## Context

The Explainer Timeline provides a step-by-step visualization of how the verdict engine arrived at a decision. This is critical for:

- **Auditors**: understanding the decision chain for compliance
- **Security Engineers**: debugging why a CVE was marked safe/unsafe
- **Developers**: learning what evidence influenced their artifact's status

This component does NOT exist in the current codebase and must be built from scratch.

---

## Related Documentation

- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Explainer section)
- `docs/modules/policy/architecture.md` (ProofTrace format)
- `docs/modules/vexlens/architecture.md` (Consensus Engine)
- Existing: `src/app/features/lineage/components/why-safe-panel/` (similar concept, simpler)

---

## Prerequisites

- [ ] Read Policy architecture for ProofTrace format
- [ ] Read VexLens consensus engine documentation
- [ ] Review existing `WhySafePanelComponent` for patterns
- [ ] Understand confidence factor computation from backend

---

## User Stories

| ID | Story | Acceptance Criteria |
|----|-------|---------------------|
| US-001 | As an auditor, I want to see each engine step in chronological order | Timeline shows ordered steps with timestamps |
| US-002 | As a security engineer, I want to expand a step to see details | Clicking step reveals evidence and sub-steps |
| US-003 | As a developer, I want to understand why my artifact passed/failed | Clear verdict explanation with contributing factors |
| US-004 | As any user, I want to copy a step summary for a ticket | Copy button generates markdown-formatted text |

---

## Delivery Tracker

| ID | Task | Status | Est. | Notes |
|----|------|--------|------|-------|
| ET-001 | Create `ExplainerTimelineComponent` shell | TODO | 0.5d | Standalone component with signals |
| ET-002 | Design step data model (`ExplainerStep`) | TODO | 0.5d | TypeScript interfaces |
| ET-003 | Implement timeline layout (vertical) | TODO | 1d | CSS Grid/Flexbox with connectors |
| ET-004 | Implement `ExplainerStepComponent` | TODO | 1d | Individual step card |
| ET-005 | Add step expansion with animation | TODO | 0.5d | Expand/collapse with @angular/animations |
| ET-006 | Wire to ProofTrace API | TODO | 0.5d | Service integration |
| ET-007 | Implement confidence indicators | TODO | 0.5d | Progress bars, chips |
| ET-008 | Add copy-to-clipboard action | TODO | 0.5d | Markdown formatting |
| ET-009 | Dark mode styling | TODO | 0.25d | CSS variables |
| ET-010 | Accessibility (a11y) | TODO | 0.5d | ARIA, keyboard nav |
| ET-011 | Unit tests | TODO | 0.5d | ≥80% coverage |
| ET-012 | Integration with hover card | TODO | 0.25d | Show in hover context |

---

## Component Architecture

```
src/app/features/lineage/components/explainer-timeline/
├── explainer-timeline.component.ts      # Container
├── explainer-timeline.component.html
├── explainer-timeline.component.scss
├── explainer-timeline.component.spec.ts
├── explainer-step/
│   ├── explainer-step.component.ts      # Individual step
│   ├── explainer-step.component.html
│   └── explainer-step.component.scss
├── step-connector/
│   └── step-connector.component.ts      # Visual connector line
└── models/
    └── explainer.models.ts              # Data interfaces
```

---

## Data Models

```typescript
// explainer.models.ts

/**
 * Represents an engine processing step in the explainer timeline.
 */
export interface ExplainerStep {
  /** Unique step identifier */
  id: string;

  /** Step sequence number (1, 2, 3...) */
  sequence: number;

  /** Step type for visual differentiation */
  type: ExplainerStepType;

  /** Short title (e.g., "VEX Consensus") */
  title: string;

  /** Longer description of what happened */
  description: string;

  /** When this step was executed */
  timestamp: string;

  /** Duration in milliseconds */
  durationMs: number;

  /** Input data summary */
  input?: StepDataSummary;

  /** Output data summary */
  output?: StepDataSummary;

  /** Confidence contribution (0.0 - 1.0) */
  confidenceContribution?: number;

  /** Nested sub-steps (for drill-down) */
  children?: ExplainerStep[];

  /** Whether the step passed/failed */
  status: 'success' | 'failure' | 'skipped' | 'pending';

  /** Evidence references */
  evidenceDigests?: string[];

  /** Rule that was applied */
  ruleId?: string;

  /** Rule version */
  ruleVersion?: string;
}

export type ExplainerStepType =
  | 'sbom-ingest'     // SBOM was ingested
  | 'vex-lookup'      // VEX sources queried
  | 'vex-consensus'   // Consensus computed
  | 'reachability'    // Reachability analysis
  | 'policy-eval'     // Policy rule evaluation
  | 'verdict'         // Final verdict
  | 'attestation'     // Signature verification
  | 'cache-hit'       // Cached result used
  | 'gate-check';     // Gate evaluation

export interface StepDataSummary {
  /** Number of items processed */
  itemCount: number;

  /** Key-value metadata */
  metadata: Record<string, string | number | boolean>;

  /** Link to detailed view */
  detailsUrl?: string;
}

/**
 * Complete explainer response from the API.
 */
export interface ExplainerResponse {
  /** Finding key (CVE + PURL) */
  findingKey: string;

  /** Final verdict */
  verdict: 'affected' | 'not_affected' | 'fixed' | 'under_investigation';

  /** Overall confidence score */
  confidenceScore: number;

  /** Processing steps in order */
  steps: ExplainerStep[];

  /** Total processing time */
  totalDurationMs: number;

  /** CGS hash for replay */
  cgsHash: string;

  /** Whether this was replayed */
  isReplay: boolean;
}
```

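A sample payload consistent with these interfaces and the mockup below; the values and the CVE+PURL key format are illustrative only:

```typescript
import { ExplainerResponse } from './models/explainer.models';

// Illustrative payload for CVE-2024-1234 (key format is an assumption).
export const SAMPLE_EXPLAINER: ExplainerResponse = {
  findingKey: 'CVE-2024-1234|pkg:npm/lodash@4.17.20',
  verdict: 'not_affected',
  confidenceScore: 0.87,
  totalDurationMs: 42,
  cgsHash: 'sha256:abc123...',
  isReplay: false,
  steps: [
    {
      id: 'step-1',
      sequence: 1,
      type: 'sbom-ingest',
      title: 'SBOM Ingest',
      description: 'Parsed 847 components from CycloneDX 1.6 SBOM',
      timestamp: '2025-12-29T10:00:00Z',
      durationMs: 2,
      status: 'success',
    },
    {
      id: 'step-2',
      sequence: 2,
      type: 'vex-lookup',
      title: 'VEX Lookup',
      description: 'Queried 4 VEX sources for CVE-2024-1234',
      timestamp: '2025-12-29T10:00:00Z',
      durationMs: 8,
      status: 'success',
      confidenceContribution: 0.25,
    },
  ],
};
```
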
---
## UI Mockup

```
┌────────────────────────────────────────────────────────────────────────────┐
│ Verdict Explanation: CVE-2024-1234 → NOT_AFFECTED                          │
│ Confidence: 0.87 | Total Time: 42ms | CGS: sha256:abc123...     [Replay]   │
├────────────────────────────────────────────────────────────────────────────┤
│                                                                            │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ① SBOM Ingest                                              2ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ Parsed 847 components from CycloneDX 1.6 SBOM                      │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│             │                                                              │
│             ▼                                                              │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ② VEX Lookup                                               8ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ Queried 4 VEX sources for CVE-2024-1234                            │   │
│  │                                                                     │   │
│  │ ┌─ Expand ──────────────────────────────────────────────────────┐  │   │
│  │ │ • Red Hat:   not_affected (trust: 0.90)                       │  │   │
│  │ │ • GitHub:    not_affected (trust: 0.75)                       │  │   │
│  │ │ • NIST:      under_investigation (trust: 0.60)                │  │   │
│  │ │ • Community: not_affected (trust: 0.65)                       │  │   │
│  │ └───────────────────────────────────────────────────────────────┘  │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│             │                                                              │
│             ▼                                                              │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ③ VEX Consensus                                            3ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ Computed consensus using WeightedVote algorithm                    │   │
│  │ Result: not_affected (confidence: 0.82)                            │   │
│  │ Contribution: +0.25 to final score                                 │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│             │                                                              │
│             ▼                                                              │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ④ Reachability Analysis                                   18ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ Analyzed call paths to vulnerable function _.template()            │   │
│  │ Result: UNREACHABLE (0 paths found)                                │   │
│  │                                                                     │   │
│  │ ┌─ Gates ───────────────────────────────────────────────────────┐  │   │
│  │ │ ✓ Auth Gate: requireAdmin() at auth.ts:42                     │  │   │
│  │ │ ✓ Feature Flag: ENABLE_TEMPLATES=false                        │  │   │
│  │ └───────────────────────────────────────────────────────────────┘  │   │
│  │ Contribution: +0.35 to final score                                 │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│             │                                                              │
│             ▼                                                              │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ⑤ Policy Evaluation                                        5ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ Applied rule: reach-gate-v2 (version 2.1.3)                        │   │
│  │ Match: "unreachable_vuln + vex_consensus → not_affected"           │   │
│  │ Contribution: +0.20 to final score                                 │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│             │                                                              │
│             ▼                                                              │
│  ┌─────────────────────────────────────────────────────────────────────┐   │
│  │ ⑥ Final Verdict                                            2ms  ✓   │   │
│  │ ─────────────────────────────────────────────────────────────────  │   │
│  │ ┌───────────────────────────────────────────────────────────────┐  │   │
│  │ │ ████████████████████████████░░░░░  87%  NOT_AFFECTED          │  │   │
│  │ └───────────────────────────────────────────────────────────────┘  │   │
│  │ DSSE Signed ✓ | Rekor Index: 123456 | [View Attestation]           │   │
│  └─────────────────────────────────────────────────────────────────────┘   │
│                                                                            │
│  [Copy Summary]  [Copy Full Trace]  [Download Evidence]                    │
└────────────────────────────────────────────────────────────────────────────┘
```

---

## Component Implementation
|
||||
|
||||
### ExplainerTimelineComponent
|
||||
|
||||
```typescript
// explainer-timeline.component.ts
import { Component, Input, Output, EventEmitter, signal, ChangeDetectionStrategy } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ExplainerStepComponent } from './explainer-step/explainer-step.component';
import { StepConnectorComponent } from './step-connector/step-connector.component';
import { ExplainerResponse, ExplainerStep } from './models/explainer.models';

@Component({
  selector: 'app-explainer-timeline',
  standalone: true,
  imports: [CommonModule, ExplainerStepComponent, StepConnectorComponent],
  templateUrl: './explainer-timeline.component.html',
  styleUrl: './explainer-timeline.component.scss',
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExplainerTimelineComponent {
  @Input() data: ExplainerResponse | null = null;
  @Input() loading = false;
  @Input() error: string | null = null;

  @Output() stepClick = new EventEmitter<ExplainerStep>();
  @Output() copyClick = new EventEmitter<'summary' | 'full'>();
  @Output() replayClick = new EventEmitter<string>();

  readonly expandedStepIds = signal<Set<string>>(new Set());

  // A getter rather than computed(): `data` is a decorator @Input, and
  // computed() only re-evaluates when the *signals* it reads change.
  get sortedSteps(): ExplainerStep[] {
    if (!this.data?.steps) return [];
    return [...this.data.steps].sort((a, b) => a.sequence - b.sequence);
  }

  toggleStep(stepId: string): void {
    this.expandedStepIds.update(ids => {
      const newIds = new Set(ids);
      if (newIds.has(stepId)) {
        newIds.delete(stepId);
      } else {
        newIds.add(stepId);
      }
      return newIds;
    });
  }

  isExpanded(stepId: string): boolean {
    return this.expandedStepIds().has(stepId);
  }

  getStepIcon(type: string): string {
    const icons: Record<string, string> = {
      'sbom-ingest': 'inventory',
      'vex-lookup': 'search',
      'vex-consensus': 'how_to_vote',
      'reachability': 'route',
      'policy-eval': 'gavel',
      'verdict': 'verified',
      'attestation': 'verified_user',
      'cache-hit': 'cached',
      'gate-check': 'security'
    };
    return icons[type] || 'circle';
  }

  copyToClipboard(format: 'summary' | 'full'): void {
    this.copyClick.emit(format);
  }
}
```

### ExplainerStepComponent

```typescript
// explainer-step.component.ts
import { Component, Input, Output, EventEmitter } from '@angular/core';
import { CommonModule } from '@angular/common';
import { trigger, state, style, transition, animate } from '@angular/animations';
import { ExplainerStep } from '../models/explainer.models';

@Component({
  selector: 'app-explainer-step',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="step-card"
         [class.expanded]="expanded"
         [class.success]="step.status === 'success'"
         [class.failure]="step.status === 'failure'"
         (click)="toggleExpand()">

      <div class="step-header">
        <span class="step-number">{{ step.sequence }}</span>
        <span class="step-icon material-icons">{{ icon }}</span>
        <span class="step-title">{{ step.title }}</span>
        <span class="step-duration">{{ step.durationMs }}ms</span>
        <span class="step-status" [class]="step.status">
          {{ statusIcon }}
        </span>
      </div>

      <div class="step-description">{{ step.description }}</div>

      @if (step.confidenceContribution) {
        <div class="confidence-chip">
          +{{ (step.confidenceContribution * 100).toFixed(0) }}% confidence
        </div>
      }

      @if (expanded && step.children?.length) {
        <div class="step-details" [@expandCollapse]>
          @for (child of step.children; track child.id) {
            <div class="sub-step">
              <span class="sub-step-bullet"></span>
              <span class="sub-step-text">{{ child.description }}</span>
            </div>
          }
        </div>
      }
    </div>
  `,
  animations: [
    trigger('expandCollapse', [
      state('void', style({ height: '0', opacity: 0 })),
      state('*', style({ height: '*', opacity: 1 })),
      transition('void <=> *', animate('200ms ease-in-out'))
    ])
  ]
})
export class ExplainerStepComponent {
  @Input({ required: true }) step!: ExplainerStep;
  @Input() icon = 'circle';
  @Input() expanded = false;
  @Output() toggle = new EventEmitter<void>();

  get statusIcon(): string {
    return this.step.status === 'success' ? '✓' :
           this.step.status === 'failure' ? '✗' :
           this.step.status === 'skipped' ? '−' : '○';
  }

  toggleExpand(): void {
    this.toggle.emit();
  }
}
```

---

## API Integration

```typescript
// explainer.service.ts
import { Injectable, inject } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';
import { ExplainerResponse } from '../components/explainer-timeline/models/explainer.models';

@Injectable({ providedIn: 'root' })
export class ExplainerService {
  private readonly http = inject(HttpClient);
  private readonly baseUrl = '/api/v1/verdicts';

  getExplanation(cgsHash: string): Observable<ExplainerResponse> {
    return this.http.get<ExplainerResponse>(`${this.baseUrl}/${cgsHash}/explain`);
  }

  replay(cgsHash: string): Observable<{ matches: boolean; deviation?: unknown }> {
    return this.http.get<{ matches: boolean; deviation?: unknown }>(`${this.baseUrl}/${cgsHash}/replay`);
  }

  formatForClipboard(data: ExplainerResponse, format: 'summary' | 'full'): string {
    if (format === 'summary') {
      return [
        `## Verdict: ${data.verdict.toUpperCase()}`,
        `Confidence: ${(data.confidenceScore * 100).toFixed(0)}%`,
        `Finding: ${data.findingKey}`,
        `CGS Hash: ${data.cgsHash}`,
        '',
        '### Steps:',
        ...data.steps.map(s => `${s.sequence}. ${s.title}: ${s.status}`)
      ].join('\n');
    }

    // Full trace includes all details
    return JSON.stringify(data, null, 2);
  }
}
```
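
For orientation, a minimal container sketch tying the service and the timeline component together. The page component name, file locations, and the way the CGS hash arrives are assumptions, not part of this sprint's contract:

```typescript
// verdict-explainer-page.component.ts — illustrative glue only (hypothetical file)
import { Component, inject, signal } from '@angular/core';
import { ExplainerTimelineComponent } from '../components/explainer-timeline/explainer-timeline.component';
import { ExplainerService } from '../services/explainer.service';
import { ExplainerResponse } from '../components/explainer-timeline/models/explainer.models';

@Component({
  selector: 'app-verdict-explainer-page',
  standalone: true,
  imports: [ExplainerTimelineComponent],
  template: `
    <app-explainer-timeline
      [data]="data()"
      [loading]="loading()"
      [error]="error()"
      (copyClick)="onCopy($event)"
      (replayClick)="onReplay($event)" />
  `
})
export class VerdictExplainerPageComponent {
  private readonly explainer = inject(ExplainerService);

  readonly data = signal<ExplainerResponse | null>(null);
  readonly loading = signal(false);
  readonly error = signal<string | null>(null);

  // Called by the route/host with the verdict's CGS hash.
  load(cgsHash: string): void {
    this.loading.set(true);
    this.explainer.getExplanation(cgsHash).subscribe({
      next: resp => { this.data.set(resp); this.loading.set(false); },
      error: () => { this.error.set('Failed to load explanation'); this.loading.set(false); }
    });
  }

  async onCopy(format: 'summary' | 'full'): Promise<void> {
    const data = this.data();
    if (!data) return;
    await navigator.clipboard.writeText(this.explainer.formatForClipboard(data, format));
  }

  onReplay(cgsHash: string): void {
    this.explainer.replay(cgsHash).subscribe();
  }
}
```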

---

## Styling (SCSS)

```scss
// explainer-timeline.component.scss
:host {
  display: block;
  width: 100%;
  max-width: 800px;
  font-family: var(--font-family-base);
}

.timeline-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 24px;
  padding-bottom: 16px;
  border-bottom: 1px solid var(--border-color, #e0e0e0);
}

.verdict-title {
  font-size: 18px;
  font-weight: 600;
}

.timeline-meta {
  display: flex;
  gap: 16px;
  font-size: 13px;
  color: var(--text-secondary, #666);
}

.timeline-steps {
  position: relative;
}

.step-card {
  background: var(--bg-primary, #fff);
  border: 1px solid var(--border-color, #e0e0e0);
  border-radius: 8px;
  padding: 16px;
  margin-bottom: 8px;
  cursor: pointer;
  transition: box-shadow 0.2s, border-color 0.2s;

  &:hover {
    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
  }

  &.expanded {
    border-color: var(--accent-color, #007bff);
  }

  &.success {
    border-left: 4px solid var(--color-success, #28a745);
  }

  &.failure {
    border-left: 4px solid var(--color-danger, #dc3545);
  }
}

.step-header {
  display: flex;
  align-items: center;
  gap: 12px;
}

.step-number {
  width: 24px;
  height: 24px;
  border-radius: 50%;
  background: var(--accent-color, #007bff);
  color: white;
  display: flex;
  align-items: center;
  justify-content: center;
  font-size: 12px;
  font-weight: 600;
}

.step-icon {
  font-size: 20px;
  color: var(--text-secondary, #666);
}

.step-title {
  flex: 1;
  font-weight: 600;
}

.step-duration {
  font-size: 12px;
  color: var(--text-secondary, #666);
  font-family: monospace;
}

.step-status {
  font-size: 16px;

  &.success { color: var(--color-success, #28a745); }
  &.failure { color: var(--color-danger, #dc3545); }
  &.skipped { color: var(--text-secondary, #666); }
}

.step-description {
  margin: 8px 0 0 36px;
  font-size: 14px;
  color: var(--text-secondary, #666);
}

.confidence-chip {
  display: inline-block;
  margin: 8px 0 0 36px;
  padding: 2px 8px;
  background: var(--color-success-light, #d4edda);
  color: var(--color-success, #155724);
  border-radius: 12px;
  font-size: 11px;
  font-weight: 500;
}

.step-details {
  margin: 16px 0 0 36px;
  padding: 12px;
  background: var(--bg-secondary, #f8f9fa);
  border-radius: 6px;
}

.sub-step {
  display: flex;
  align-items: flex-start;
  gap: 8px;
  margin-bottom: 8px;

  &:last-child {
    margin-bottom: 0;
  }
}

.sub-step-bullet {
  width: 6px;
  height: 6px;
  border-radius: 50%;
  background: var(--accent-color, #007bff);
  margin-top: 6px;
}

.sub-step-text {
  flex: 1;
  font-size: 13px;
}

.connector {
  position: absolute;
  left: 28px;
  width: 2px;
  background: var(--border-color, #e0e0e0);
  height: 8px;
}

.timeline-actions {
  display: flex;
  gap: 12px;
  margin-top: 24px;
  padding-top: 16px;
  border-top: 1px solid var(--border-color, #e0e0e0);
}

// Dark mode
:host-context(.dark-mode) {
  .step-card {
    background: var(--bg-primary-dark, #1e1e2e);
    border-color: var(--border-color-dark, #3a3a4a);
  }

  .step-details {
    background: var(--bg-secondary-dark, #2a2a3a);
  }
}
```

---

## Success Criteria

- [ ] Timeline displays all engine steps in sequence order
- [ ] Each step shows: title, duration, status, description
- [ ] Steps expand/collapse on click with smooth animation
- [ ] Confidence contributions display per-step
- [ ] Copy to clipboard works (summary and full formats)
- [ ] Replay button triggers verification
- [ ] Dark mode styling works correctly
- [ ] Keyboard navigation functional (Tab, Enter, Escape)
- [ ] Screen reader announces step changes
- [ ] Unit tests achieve ≥80% coverage
- [ ] Performance: renders 20 steps in <100ms

---

## Decisions & Risks

| ID | Decision/Risk | Status | Resolution |
|----|---------------|--------|------------|
| DR-001 | Step data source: embed in hover or separate API? | RESOLVED | Separate API (`/explain`) for full traces |
| DR-002 | Animation library: @angular/animations vs CSS | RESOLVED | Use @angular/animations for state control |
| DR-003 | Copy format: Markdown vs plain text | RESOLVED | Markdown for summary, JSON for full |

---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Detailed implementation spec |

docs/implplan/SPRINT_20251229_001_006_FE_node_diff_table.md (new file, 817 lines)
@@ -0,0 +1,817 @@

# SPRINT_20251229_001_006_FE_node_diff_table

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 006 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Node Diff Table with Expandable Rows |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/components/diff-table/` |
| **Status** | TODO |
| **Priority** | P0 - Core UX Deliverable |
| **Estimated Effort** | 4-5 days |

---

## Context

The Node Diff Table provides a tabular view of changes between two lineage nodes (SBOM versions). While the existing `LineageSbomDiffComponent` shows a 3-column diff view, we need:

1. **Row-level expansion** - Click a component to see version details, license changes, and vulnerability impact
2. **Drill-down navigation** - From component → CVEs → VEX status → Evidence
3. **Filtering & sorting** - By change type, severity, component type
4. **Bulk actions** - Select multiple items for export or ticket creation

The existing `DataTableComponent` in shared components provides a base, but needs custom row expansion logic.

---

## Related Documentation

- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Diff section)
- Existing: `src/app/features/lineage/components/lineage-sbom-diff/`
- Existing: `src/app/shared/components/data-table/`
- API: `GET /api/v1/lineage/{from}/compare?to={to}`

---

## Prerequisites

- [ ] Review existing `DataTableComponent` for extension patterns
- [ ] Review `LineageSbomDiffComponent` for current implementation
- [ ] Understand `ComponentDiff` model from backend
- [ ] Review shared table styling conventions

---

## User Stories

| ID | Story | Acceptance Criteria |
|----|-------|---------------------|
| US-001 | As a security engineer, I want to see all component changes in a table | Table shows added/removed/changed components |
| US-002 | As a developer, I want to expand a row to see details | Click row reveals version history, CVEs, licenses |
| US-003 | As an auditor, I want to filter by change type | Filter buttons: All, Added, Removed, Changed |
| US-004 | As a user, I want to sort by different columns | Sort by name, version, severity, change type |
| US-005 | As a user, I want to select rows for bulk export | Checkbox selection with bulk action bar |

---

## Delivery Tracker

| ID | Task | Status | Est. | Notes |
|----|------|--------|------|-------|
| DT-001 | Create `DiffTableComponent` shell | TODO | 0.5d | Standalone component |
| DT-002 | Implement column definitions | TODO | 0.5d | Name, Version, License, Vulns, Change |
| DT-003 | Add row expansion template | TODO | 1d | Expandable detail section |
| DT-004 | Implement filter chips | TODO | 0.5d | Added/Removed/Changed filters |
| DT-005 | Add sorting functionality | TODO | 0.5d | Column header sort |
| DT-006 | Implement row selection | TODO | 0.5d | Checkbox + bulk actions |
| DT-007 | Create `ExpandedRowComponent` | TODO | 0.5d | Detail view template |
| DT-008 | Wire to Compare API | TODO | 0.25d | Service integration |
| DT-009 | Add pagination/virtual scroll | TODO | 0.25d | For large diffs |
| DT-010 | Dark mode styling | TODO | 0.25d | CSS variables |
| DT-011 | Unit tests | TODO | 0.5d | ≥80% coverage |

---

## Component Architecture

```
src/app/features/lineage/components/diff-table/
├── diff-table.component.ts              # Main table container
├── diff-table.component.html
├── diff-table.component.scss
├── diff-table.component.spec.ts
├── expanded-row/
│   ├── expanded-row.component.ts        # Row detail view
│   ├── expanded-row.component.html
│   └── expanded-row.component.scss
├── filter-bar/
│   ├── filter-bar.component.ts          # Filter chips
│   └── filter-bar.component.scss
├── column-header/
│   ├── column-header.component.ts       # Sortable header
│   └── column-header.component.scss
└── models/
    └── diff-table.models.ts             # Table-specific interfaces
```

---

## Data Models

```typescript
// diff-table.models.ts

/**
 * Column definition for the diff table.
 */
export interface DiffTableColumn {
  /** Column identifier */
  id: string;

  /** Display header text */
  header: string;

  /** Property path in data object */
  field: string;

  /** Column width (CSS value) */
  width?: string;

  /** Whether column is sortable */
  sortable: boolean;

  /** Custom cell template name (includes the selection/expander cells used by DiffTableComponent) */
  template?: 'checkbox' | 'expander' | 'text' | 'version' | 'license' | 'vulns' | 'change-type' | 'actions';

  /** Alignment */
  align?: 'left' | 'center' | 'right';
}

/**
 * Row data for diff table (flattened from ComponentChange).
 */
export interface DiffTableRow {
  /** Row ID (PURL) */
  id: string;

  /** Component name */
  name: string;

  /** Package URL */
  purl: string;

  /** Change type */
  changeType: 'added' | 'removed' | 'version-changed' | 'license-changed' | 'both-changed';

  /** Previous version (if applicable) */
  previousVersion?: string;

  /** Current version (if applicable) */
  currentVersion?: string;

  /** Previous license */
  previousLicense?: string;

  /** Current license */
  currentLicense?: string;

  /** Vulnerability impact */
  vulnImpact?: VulnImpact;

  /** Expanded state */
  expanded: boolean;

  /** Selection state */
  selected: boolean;
}

/**
 * Vulnerability impact for a component change.
 */
export interface VulnImpact {
  /** CVEs resolved by this change */
  resolved: string[];

  /** CVEs introduced by this change */
  introduced: string[];

  /** CVEs still present */
  unchanged: string[];
}

/**
 * Expanded row detail data.
 */
export interface ExpandedRowData {
  /** Component metadata */
  metadata: Record<string, string>;

  /** Version history (recent) */
  versionHistory: { version: string; date: string }[];

  /** CVE details */
  cves: CveDetail[];

  /** License details */
  licenseInfo?: LicenseInfo;
}

export interface CveDetail {
  id: string;
  severity: 'critical' | 'high' | 'medium' | 'low' | 'unknown';
  status: 'affected' | 'not_affected' | 'fixed' | 'under_investigation';
  vexSource?: string;
}

export interface LicenseInfo {
  spdxId: string;
  name: string;
  isOsiApproved: boolean;
  riskLevel: 'low' | 'medium' | 'high';
}

/**
 * Filter state for the table.
 */
export interface DiffTableFilter {
  changeTypes: Set<'added' | 'removed' | 'version-changed' | 'license-changed'>;
  searchTerm: string;
  showOnlyVulnerable: boolean;
}

/**
 * Sort state for the table.
 */
export interface DiffTableSort {
  column: string;
  direction: 'asc' | 'desc';
}
```
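
To make the flattening concrete, a possible mapper from the Compare API payload to `DiffTableRow[]`. The `ComponentChangeDto` shape below is an assumption; confirm it against the actual `GET /api/v1/lineage/{from}/compare?to={to}` contract before wiring up:

```typescript
// diff-table.mapper.ts — illustrative only; assumes this file sits next to the models.
import { DiffTableRow, VulnImpact } from './diff-table.models';

// Assumed backend shape, NOT confirmed against the real API.
interface ComponentChangeDto {
  purl: string;
  name: string;
  changeType: DiffTableRow['changeType'];
  previousVersion?: string;
  currentVersion?: string;
  previousLicense?: string;
  currentLicense?: string;
  vulnImpact?: VulnImpact;
}

export function toDiffTableRows(changes: ComponentChangeDto[]): DiffTableRow[] {
  return changes.map(c => ({
    id: c.purl,                // PURL doubles as the row id
    name: c.name,
    purl: c.purl,
    changeType: c.changeType,
    previousVersion: c.previousVersion,
    currentVersion: c.currentVersion,
    previousLicense: c.previousLicense,
    currentLicense: c.currentLicense,
    vulnImpact: c.vulnImpact,
    expanded: false,           // UI-only state starts collapsed
    selected: false
  }));
}
```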

---

## UI Mockup

```
┌────────────────────────────────────────────────────────────────────────────┐
│ Component Changes: v1.1 → v1.2 │
│ 847 components | 12 added | 5 removed | 23 changed │
├────────────────────────────────────────────────────────────────────────────┤
│ │
│ ┌─ Filters ─────────────────────────────────────────────────────────────┐ │
│ │ [All (40)] [● Added (12)] [● Removed (5)] [● Changed (23)] │ │
│ │ Search: [________________________] [□ Vulnerable Only] │ │
│ └───────────────────────────────────────────────────────────────────────┘ │
│ │
│ ┌─ Bulk Actions ────────────────────────────────────────────────────────┐ │
│ │ [□] 3 selected | [Export] [Create Ticket] [Clear] │ │
│ └───────────────────────────────────────────────────────────────────────┘ │
│ │
│ ┌──────────────────────────────────────────────────────────────────────┐ │
│ │ □ Name │ Version │ License │ Vulns │ Change │ │
│ ├──────────────────────────────────────────────────────────────────────┤ │
│ │ ▶ lodash │ 4.17.20 → 21 │ MIT │ -2 │ ● Upgraded │ │
│ │ ▶ axios │ 1.5.0 → 1.6.0│ MIT │ 0 │ ● Upgraded │ │
│ │ ▼ express │ 4.18.2 │ MIT │ +1 │ ● Upgraded │ │
│ │ ┌─────────────────────────────────────────────────────────────────┐│ │
│ │ │ Package: pkg:npm/express@4.18.2 ││ │
│ │ │ Previous: 4.17.1 | Current: 4.18.2 ││ │
│ │ │ ││ │
│ │ │ Version History: ││ │
│ │ │ • 4.18.2 (2024-10-01) - Current ││ │
│ │ │ • 4.17.1 (2024-06-15) - Previous ││ │
│ │ │ • 4.17.0 (2024-03-01) ││ │
│ │ │ ││ │
│ │ │ CVE Impact: ││ │
│ │ │ ┌──────────────────────────────────────────────────────────┐ ││ │
│ │ │ │ + CVE-2024-9999 │ HIGH │ affected │ Introduced │ ││ │
│ │ │ │ - CVE-2024-8888 │ MED │ fixed │ Resolved │ ││ │
│ │ │ └──────────────────────────────────────────────────────────┘ ││ │
│ │ │ ││ │
│ │ │ [View SBOM Entry] [View VEX] [Copy PURL] ││ │
│ │ └─────────────────────────────────────────────────────────────────┘│ │
│ │ ▶ helmet │ — → 7.0.0 │ MIT │ 0 │ ● Added │ │
│ │ ▶ moment │ 2.29.4 → — │ MIT │ 0 │ ● Removed │ │
│ └──────────────────────────────────────────────────────────────────────┘ │
│ │
│ Showing 1-20 of 40 | [< Prev] [1] [2] [Next >] │
└────────────────────────────────────────────────────────────────────────────┘
```

---

## Component Implementation

### DiffTableComponent

```typescript
// diff-table.component.ts
import {
  Component, Input, Output, EventEmitter,
  signal, computed, input, ChangeDetectionStrategy
} from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { ExpandedRowComponent } from './expanded-row/expanded-row.component';
import { FilterBarComponent } from './filter-bar/filter-bar.component';
import { ColumnHeaderComponent } from './column-header/column-header.component';
import {
  DiffTableRow, DiffTableColumn, DiffTableFilter, DiffTableSort, ExpandedRowData, VulnImpact
} from './models/diff-table.models';

@Component({
  selector: 'app-diff-table',
  standalone: true,
  imports: [
    CommonModule, FormsModule,
    ExpandedRowComponent, FilterBarComponent, ColumnHeaderComponent
  ],
  templateUrl: './diff-table.component.html',
  styleUrl: './diff-table.component.scss',
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class DiffTableComponent {
  // Input data. `rows` is a signal input so the computed pipelines below
  // recompute whenever the parent passes a new array.
  readonly rows = input<DiffTableRow[]>([]);
  @Input() loading = false;
  @Input() sourceLabel = 'Source';
  @Input() targetLabel = 'Target';

  // Event outputs
  @Output() rowExpand = new EventEmitter<DiffTableRow>();
  @Output() rowSelect = new EventEmitter<DiffTableRow[]>();
  @Output() exportClick = new EventEmitter<DiffTableRow[]>();
  @Output() ticketClick = new EventEmitter<DiffTableRow[]>();

  // State
  readonly filter = signal<DiffTableFilter>({
    changeTypes: new Set(['added', 'removed', 'version-changed', 'license-changed']),
    searchTerm: '',
    showOnlyVulnerable: false
  });

  readonly sort = signal<DiffTableSort>({
    column: 'name',
    direction: 'asc'
  });

  readonly expandedRowIds = signal<Set<string>>(new Set());
  readonly selectedRowIds = signal<Set<string>>(new Set());
  readonly expandedRowData = signal<Map<string, ExpandedRowData>>(new Map());

  // Column definitions
  readonly columns: DiffTableColumn[] = [
    { id: 'select', header: '', field: 'selected', width: '40px', sortable: false, template: 'checkbox' },
    { id: 'expand', header: '', field: 'expanded', width: '40px', sortable: false, template: 'expander' },
    { id: 'name', header: 'Name', field: 'name', sortable: true, template: 'text' },
    { id: 'version', header: 'Version', field: 'version', width: '150px', sortable: true, template: 'version' },
    { id: 'license', header: 'License', field: 'currentLicense', width: '100px', sortable: true, template: 'license' },
    { id: 'vulns', header: 'Vulns', field: 'vulnImpact', width: '80px', sortable: true, template: 'vulns' },
    { id: 'changeType', header: 'Change', field: 'changeType', width: '120px', sortable: true, template: 'change-type' }
  ];

  // Computed: filtered and sorted rows
  readonly displayRows = computed(() => {
    let result = [...this.rows()];
    const f = this.filter();
    const s = this.sort();

    // Apply filters. A 'both-changed' row counts as changed when either of
    // its constituent change types is selected.
    if (f.changeTypes.size < 4) {
      result = result.filter(r =>
        r.changeType === 'both-changed'
          ? f.changeTypes.has('version-changed') || f.changeTypes.has('license-changed')
          : f.changeTypes.has(r.changeType)
      );
    }
    if (f.searchTerm) {
      const term = f.searchTerm.toLowerCase();
      result = result.filter(r =>
        r.name.toLowerCase().includes(term) ||
        r.purl.toLowerCase().includes(term)
      );
    }
    if (f.showOnlyVulnerable) {
      result = result.filter(r =>
        r.vulnImpact && (r.vulnImpact.introduced.length > 0 || r.vulnImpact.resolved.length > 0)
      );
    }

    // Apply sort
    result.sort((a, b) => {
      const aVal = (a as any)[s.column] ?? '';
      const bVal = (b as any)[s.column] ?? '';
      const cmp = String(aVal).localeCompare(String(bVal));
      return s.direction === 'asc' ? cmp : -cmp;
    });

    return result;
  });

  readonly selectedRows = computed(() =>
    this.rows().filter(r => this.selectedRowIds().has(r.id))
  );

  readonly stats = computed(() => ({
    total: this.rows().length,
    added: this.rows().filter(r => r.changeType === 'added').length,
    removed: this.rows().filter(r => r.changeType === 'removed').length,
    changed: this.rows().filter(r => r.changeType.includes('changed')).length
  }));

  // Actions
  toggleRowExpand(row: DiffTableRow): void {
    this.expandedRowIds.update(ids => {
      const newIds = new Set(ids);
      if (newIds.has(row.id)) {
        newIds.delete(row.id);
      } else {
        newIds.add(row.id);
        this.rowExpand.emit(row); // Fetch details
      }
      return newIds;
    });
  }

  toggleRowSelect(row: DiffTableRow): void {
    this.selectedRowIds.update(ids => {
      const newIds = new Set(ids);
      if (newIds.has(row.id)) {
        newIds.delete(row.id);
      } else {
        newIds.add(row.id);
      }
      return newIds;
    });
    this.rowSelect.emit(this.selectedRows());
  }

  toggleSelectAll(): void {
    if (this.selectedRowIds().size === this.displayRows().length) {
      this.selectedRowIds.set(new Set());
    } else {
      this.selectedRowIds.set(new Set(this.displayRows().map(r => r.id)));
    }
    this.rowSelect.emit(this.selectedRows());
  }

  onSort(column: string): void {
    this.sort.update(s => ({
      column,
      direction: s.column === column && s.direction === 'asc' ? 'desc' : 'asc'
    }));
  }

  onFilterChange(filter: Partial<DiffTableFilter>): void {
    this.filter.update(f => ({ ...f, ...filter }));
  }

  isRowExpanded(rowId: string): boolean {
    return this.expandedRowIds().has(rowId);
  }

  isRowSelected(rowId: string): boolean {
    return this.selectedRowIds().has(rowId);
  }

  getChangeTypeClass(type: string): string {
    const classes: Record<string, string> = {
      'added': 'change-added',
      'removed': 'change-removed',
      'version-changed': 'change-upgraded',
      'license-changed': 'change-license',
      'both-changed': 'change-both'
    };
    return classes[type] || '';
  }

  getVulnDelta(impact?: VulnImpact): string {
    if (!impact) return '—';
    const delta = impact.introduced.length - impact.resolved.length;
    if (delta > 0) return `+${delta}`;
    if (delta < 0) return `${delta}`;
    return '0';
  }
}
```
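
The `rowExpand` output above is intended to trigger a lazy detail fetch that fills `expandedRowData`. A minimal sketch of that service side (the endpoint and service name are assumptions, not a defined API):

```typescript
// diff-detail.service.ts — illustrative only; confirm the endpoint against the
// real lineage API before implementing DT-008.
import { Injectable, inject } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';
import { ExpandedRowData } from './models/diff-table.models';

@Injectable({ providedIn: 'root' })
export class DiffDetailService {
  private readonly http = inject(HttpClient);

  // Hypothetical endpoint; the PURL must be URL-encoded because it contains '/' and '@'.
  getComponentDetail(purl: string): Observable<ExpandedRowData> {
    return this.http.get<ExpandedRowData>(
      `/api/v1/lineage/components/${encodeURIComponent(purl)}/detail`
    );
  }
}
```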

### ExpandedRowComponent

```typescript
// expanded-row.component.ts
import { Component, Input, Output, EventEmitter } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ExpandedRowData, CveDetail } from '../models/diff-table.models';

@Component({
  selector: 'app-expanded-row',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="expanded-content">
      <div class="metadata-section">
        <h4>Package Details</h4>
        <div class="metadata-grid">
          @for (entry of metadataEntries; track entry.key) {
            <div class="metadata-item">
              <span class="meta-label">{{ entry.key }}:</span>
              <span class="meta-value">{{ entry.value }}</span>
            </div>
          }
        </div>
      </div>

      @if (data.versionHistory?.length) {
        <div class="history-section">
          <h4>Version History</h4>
          <ul class="version-list">
            @for (v of data.versionHistory; track v.version) {
              <li [class.current]="$first">
                <span class="version">{{ v.version }}</span>
                <span class="date">{{ v.date | date:'mediumDate' }}</span>
                @if ($first) { <span class="badge">Current</span> }
              </li>
            }
          </ul>
        </div>
      }

      @if (data.cves?.length) {
        <div class="cve-section">
          <h4>CVE Impact</h4>
          <table class="cve-table">
            <thead>
              <tr>
                <th>CVE</th>
                <th>Severity</th>
                <th>Status</th>
                <th>Impact</th>
              </tr>
            </thead>
            <tbody>
              @for (cve of data.cves; track cve.id) {
                <tr [class]="'severity-' + cve.severity">
                  <td><code>{{ cve.id }}</code></td>
                  <td><span class="severity-badge">{{ cve.severity }}</span></td>
                  <td>{{ cve.status }}</td>
                  <td>{{ getCveImpact(cve) }}</td>
                </tr>
              }
            </tbody>
          </table>
        </div>
      }

      <div class="actions-section">
        <button class="btn-link" (click)="viewSbom.emit()">View SBOM Entry</button>
        <button class="btn-link" (click)="viewVex.emit()">View VEX</button>
        <button class="btn-link" (click)="copyPurl.emit()">Copy PURL</button>
      </div>
    </div>
  `,
  styleUrl: './expanded-row.component.scss'
})
export class ExpandedRowComponent {
  @Input({ required: true }) data!: ExpandedRowData;
  @Input() purl = '';
  @Input() introducedCves: string[] = [];
  @Input() resolvedCves: string[] = [];

  @Output() viewSbom = new EventEmitter<void>();
  @Output() viewVex = new EventEmitter<void>();
  @Output() copyPurl = new EventEmitter<void>();

  get metadataEntries(): { key: string; value: string }[] {
    return Object.entries(this.data.metadata || {}).map(([key, value]) => ({ key, value }));
  }

  getCveImpact(cve: CveDetail): string {
    if (this.introducedCves.includes(cve.id)) return 'Introduced';
    if (this.resolvedCves.includes(cve.id)) return 'Resolved';
    return 'Unchanged';
  }
}
```

---

## Styling (SCSS)

```scss
// diff-table.component.scss
:host {
  display: block;
  width: 100%;
}

.table-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: 16px;
}

.table-title {
  font-size: 16px;
  font-weight: 600;
}

.table-stats {
  display: flex;
  gap: 16px;
  font-size: 13px;
  color: var(--text-secondary);
}

.filter-section {
  margin-bottom: 16px;
}

.bulk-actions {
  display: flex;
  align-items: center;
  gap: 12px;
  padding: 8px 12px;
  background: var(--bg-highlight, #f0f7ff);
  border-radius: 6px;
  margin-bottom: 16px;

  .selection-count {
    font-weight: 500;
  }

  .action-btn {
    padding: 4px 12px;
    background: var(--accent-color);
    color: white;
    border: none;
    border-radius: 4px;
    cursor: pointer;

    &:hover {
      filter: brightness(1.1);
    }
  }
}

.data-table {
  width: 100%;
  border-collapse: collapse;
  background: var(--bg-primary);
  border: 1px solid var(--border-color);
  border-radius: 8px;
  overflow: hidden;
}

thead th {
  background: var(--bg-secondary);
  padding: 12px 16px;
  text-align: left;
  font-weight: 600;
  font-size: 13px;
  border-bottom: 1px solid var(--border-color);

  &.sortable {
    cursor: pointer;
    user-select: none;

    &:hover {
      background: var(--bg-hover);
    }
  }
}

tbody tr {
  border-bottom: 1px solid var(--border-light);

  &:hover {
    background: var(--bg-hover, #f8f9fa);
  }

  &.expanded {
    background: var(--bg-highlight, #f0f7ff);
  }
}

tbody td {
  padding: 12px 16px;
  font-size: 14px;
}

.cell-expander {
  cursor: pointer;
  color: var(--text-secondary);

  &:hover {
    color: var(--accent-color);
  }
}

.cell-checkbox {
  width: 40px;

  input[type="checkbox"] {
    width: 16px;
    height: 16px;
    cursor: pointer;
  }
}

.cell-version {
  font-family: monospace;
  font-size: 13px;

  .version-arrow {
    color: var(--text-secondary);
    margin: 0 4px;
  }

  .version-new {
    color: var(--color-success);
  }

  .version-old {
    color: var(--text-secondary);
    text-decoration: line-through;
  }
}

.cell-vulns {
  font-weight: 600;

  &.positive { color: var(--color-danger); }
  &.negative { color: var(--color-success); }
  &.neutral { color: var(--text-secondary); }
}

.change-badge {
  display: inline-block;
  padding: 2px 8px;
  border-radius: 12px;
  font-size: 11px;
  font-weight: 500;
  text-transform: uppercase;
}

.change-added {
  background: var(--color-success-light, #d4edda);
  color: var(--color-success, #155724);
}

.change-removed {
  background: var(--color-danger-light, #f8d7da);
  color: var(--color-danger, #721c24);
}

.change-upgraded {
  background: var(--color-info-light, #cce5ff);
  color: var(--color-info, #004085);
}

.change-license {
  background: var(--color-warning-light, #fff3cd);
  color: var(--color-warning, #856404);
}

.expanded-row-cell {
  padding: 0 !important;

  .expanded-content {
    padding: 16px 24px;
    background: var(--bg-secondary);
    border-top: 1px solid var(--border-color);
  }
}

// Dark mode
:host-context(.dark-mode) {
  .data-table {
    background: var(--bg-primary-dark);
    border-color: var(--border-color-dark);
  }

  thead th {
    background: var(--bg-secondary-dark);
    border-color: var(--border-color-dark);
  }

  tbody tr:hover {
    background: var(--bg-hover-dark);
  }
}
```

---

## Success Criteria

- [ ] Table displays all component changes with correct columns
- [ ] Row expansion shows version history, CVE impact, metadata
- [ ] Filter chips work: All, Added, Removed, Changed
- [ ] Search filters by name and PURL
- [ ] Column sorting works (asc/desc toggle)
- [ ] Checkbox selection enables bulk actions
- [ ] Export button generates selection data
- [ ] Create Ticket button formats data for copy
- [ ] Pagination handles 100+ items smoothly
- [ ] Virtual scroll for 1000+ items (optional)
- [ ] Dark mode styling works correctly
- [ ] Keyboard navigation: Arrow keys, Enter to expand (see the sketch after this list)
- [ ] Unit tests achieve ≥80% coverage

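A possible shape for the keyboard-navigation criterion (illustrative only; the directive selector, the `data-diff-row` marker, and the focus model are assumptions):

```typescript
// diff-table-keys.directive.ts — sketch: roving focus over row elements tagged
// with [data-diff-row] (each row needs tabindex="0" to be focusable); Enter
// re-dispatches as a click, which already toggles row expansion.
import { Directive, ElementRef, HostListener } from '@angular/core';

@Directive({ selector: '[appDiffTableKeys]', standalone: true })
export class DiffTableKeysDirective {
  constructor(private readonly host: ElementRef<HTMLElement>) {}

  @HostListener('keydown', ['$event'])
  onKeydown(event: KeyboardEvent): void {
    const rows = Array.from(
      this.host.nativeElement.querySelectorAll<HTMLElement>('[data-diff-row]')
    );
    const idx = rows.indexOf(document.activeElement as HTMLElement);

    if (event.key === 'ArrowDown' && idx < rows.length - 1) {
      rows[idx + 1].focus();
      event.preventDefault();
    } else if (event.key === 'ArrowUp' && idx > 0) {
      rows[idx - 1].focus();
      event.preventDefault();
    } else if (event.key === 'Enter' && idx >= 0) {
      rows[idx].click(); // row click toggles expansion
      event.preventDefault();
    }
  }
}
```
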
---

## Decisions & Risks

| ID | Decision/Risk | Status | Resolution |
|----|---------------|--------|------------|
| DR-001 | Virtual scroll: when to enable? | RESOLVED | Enable at >100 rows |
| DR-002 | CVE details: inline or modal? | RESOLVED | Inline in expanded row |
| DR-003 | Extend DataTable or build new? | RESOLVED | New component, reuse patterns |

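Per DR-001, virtual scrolling kicks in above 100 rows. One way to wrap the row list with Angular CDK, assuming `@angular/cdk` is available in the workspace (selector, row height, and viewport height here are placeholders):

```typescript
// diff-table-virtual.component.ts — illustrative sketch for DT-009, not a spec.
import { Component, input } from '@angular/core';
import { ScrollingModule } from '@angular/cdk/scrolling';
import { DiffTableRow } from './models/diff-table.models';

@Component({
  selector: 'app-diff-table-virtual',
  standalone: true,
  imports: [ScrollingModule],
  template: `
    <!-- A fixed row height keeps the viewport math cheap; expanded rows would
         need autosize handling or a flattened row model. -->
    <cdk-virtual-scroll-viewport itemSize="48" class="diff-viewport">
      <div class="virtual-row" *cdkVirtualFor="let row of rows(); trackBy: trackById">
        {{ row.name }} — {{ row.changeType }}
      </div>
    </cdk-virtual-scroll-viewport>
  `,
  styles: [`.diff-viewport { height: 480px; }`]
})
export class DiffTableVirtualComponent {
  readonly rows = input<DiffTableRow[]>([]);

  trackById(_: number, row: DiffTableRow): string {
    return row.id;
  }
}
```
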
---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Detailed implementation spec |

docs/implplan/SPRINT_20251229_001_007_FE_pinned_explanations.md (new file, 795 lines)
@@ -0,0 +1,795 @@

# SPRINT_20251229_001_007_FE_pinned_explanations

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 007 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Pinned Explanations - Copy-Safe Ticket Creation |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/components/pinned-explanation/` |
| **Status** | TODO |
| **Priority** | P1 - UX Enhancement |
| **Estimated Effort** | 2-3 days |
| **Dependencies** | FE_005 (Explainer Timeline), FE_006 (Node Diff Table) |

---

## Context

Pinned Explanations allow users to capture explanation snippets for use in:

- **Jira/GitHub tickets** - Paste pre-formatted evidence into issue descriptions
- **Audit reports** - Copy evidence chains for compliance documentation
- **Team communication** - Share findings in Slack/Teams with context
- **Knowledge base** - Archive decision rationale for future reference

This feature bridges the gap between the interactive UI and external documentation needs.

---

## Related Documentation

- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Pinned Explanations section)
- FE_005 Explainer Timeline (source of explainer steps to pin)
- FE_006 Node Diff Table (source of component changes to pin)
- Existing: `src/app/core/services/clipboard.service.ts` (if exists)

---

## Prerequisites

- [ ] Complete FE_005 (Explainer Timeline) for step pinning
- [ ] Complete FE_006 (Node Diff Table) for component pinning
- [ ] Review existing toast/notification patterns in codebase
- [ ] Understand markdown rendering in target systems (Jira, GitHub, etc.)

---

## User Stories

| ID | Story | Acceptance Criteria |
|----|-------|---------------------|
| US-001 | As a security engineer, I want to pin an explanation step for my ticket | Pin button appears on steps, pinned items appear in panel |
| US-002 | As an auditor, I want to copy multiple explanations as formatted text | Copy All button generates markdown for all pinned items |
| US-003 | As a developer, I want to clear my pinned items | Clear button removes all pins, confirmation shown |
| US-004 | As a user, I want my pins to persist in the session | Pins survive page navigation within the app |
| US-005 | As a user, I want to format output for my target system | Format options: Markdown, Plain Text, JSON, HTML |

---

## Delivery Tracker

| ID | Task | Status | Est. | Notes |
|----|------|--------|------|-------|
| PE-001 | Create `PinnedExplanationService` | TODO | 0.5d | Session-based state management |
| PE-002 | Create `PinnedPanelComponent` | TODO | 0.5d | Floating panel with pinned items |
| PE-003 | Create `PinnedItemComponent` | TODO | 0.5d | Individual pinned item display |
| PE-004 | Add pin buttons to Explainer Timeline | TODO | 0.25d | Integration with FE_005 |
| PE-005 | Add pin buttons to Diff Table rows | TODO | 0.25d | Integration with FE_006 |
| PE-006 | Implement format templates | TODO | 0.5d | Markdown, Plain, JSON, HTML |
| PE-007 | Add copy-to-clipboard with toast | TODO | 0.25d | Use Clipboard API |
| PE-008 | Session persistence | TODO | 0.25d | sessionStorage |
| PE-009 | Dark mode styling | TODO | 0.25d | CSS variables |
| PE-010 | Unit tests | TODO | 0.25d | ≥80% coverage |

---

## Component Architecture

```
src/app/features/lineage/components/pinned-explanation/
├── pinned-panel/
│   ├── pinned-panel.component.ts        # Floating panel container
│   ├── pinned-panel.component.html
│   └── pinned-panel.component.scss
├── pinned-item/
│   ├── pinned-item.component.ts         # Individual pinned item
│   └── pinned-item.component.scss
├── format-selector/
│   └── format-selector.component.ts     # Format dropdown
└── models/
    └── pinned.models.ts                 # Data interfaces

src/app/core/services/
└── pinned-explanation.service.ts        # Global state service
```

---

## Data Models

```typescript
// pinned.models.ts

/**
 * A pinned explanation item.
 */
export interface PinnedItem {
  /** Unique ID for this pin */
  id: string;

  /** Type of pinned content */
  type: PinnedItemType;

  /** Source context (e.g., artifact ref, CVE ID) */
  sourceContext: string;

  /** Short title for display */
  title: string;

  /** Full content for export */
  content: string;

  /** Structured data (optional, for JSON export) */
  data?: Record<string, unknown>;

  /** When this was pinned */
  pinnedAt: Date;

  /** Optional notes added by user */
  notes?: string;

  /** CGS hash for verification */
  cgsHash?: string;
}

export type PinnedItemType =
  | 'explainer-step'
  | 'component-change'
  | 'cve-status'
  | 'verdict'
  | 'attestation'
  | 'custom';

/**
 * Export format options.
 */
export type ExportFormat = 'markdown' | 'plain' | 'json' | 'html' | 'jira';

/**
 * Format templates for different export targets.
 */
export interface FormatTemplate {
  format: ExportFormat;
  label: string;
  icon: string;
  description: string;
  generateFn: (items: PinnedItem[]) => string;
}
```
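
The `FormatTemplate` metadata is what the format selector can render. An illustrative registry (labels, icons, and descriptions are assumptions; `generateFn` would delegate to the service's `export()` and is omitted here):

```typescript
// format-templates.ts — hypothetical registry for the format dropdown.
import { FormatTemplate } from './pinned.models';

export const FORMAT_TEMPLATES: Omit<FormatTemplate, 'generateFn'>[] = [
  { format: 'markdown', label: 'Markdown', icon: 'description', description: 'GitHub/GitLab issues' },
  { format: 'jira', label: 'Jira', icon: 'confirmation_number', description: 'Jira wiki markup' },
  { format: 'json', label: 'JSON', icon: 'data_object', description: 'Machine-readable export' },
  { format: 'html', label: 'HTML', icon: 'code', description: 'Standalone report page' },
  { format: 'plain', label: 'Plain text', icon: 'notes', description: 'Email / chat' }
];
```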

---

## Service Implementation

```typescript
// pinned-explanation.service.ts
import { Injectable, signal, computed } from '@angular/core';
import {
  PinnedItem, ExportFormat
} from '../../features/lineage/components/pinned-explanation/models/pinned.models';

@Injectable({ providedIn: 'root' })
export class PinnedExplanationService {
  private readonly STORAGE_KEY = 'stellaops-pinned-explanations';

  // State
  private readonly _items = signal<PinnedItem[]>(this.loadFromSession());

  // Computed
  readonly items = computed(() => this._items());
  readonly count = computed(() => this._items().length);
  readonly isEmpty = computed(() => this._items().length === 0);

  /**
   * Pin a new item.
   */
  pin(item: Omit<PinnedItem, 'id' | 'pinnedAt'>): void {
    const newItem: PinnedItem = {
      ...item,
      id: crypto.randomUUID(),
      pinnedAt: new Date()
    };

    this._items.update(items => [...items, newItem]);
    this.saveToSession();
  }

  /**
   * Unpin an item by ID.
   */
  unpin(id: string): void {
    this._items.update(items => items.filter(i => i.id !== id));
    this.saveToSession();
  }

  /**
   * Clear all pinned items.
   */
  clearAll(): void {
    this._items.set([]);
    this.saveToSession();
  }

  /**
   * Update notes on a pinned item.
   */
  updateNotes(id: string, notes: string): void {
    this._items.update(items =>
      items.map(i => i.id === id ? { ...i, notes } : i)
    );
    this.saveToSession();
  }

  /**
   * Export pinned items in the specified format.
   */
  export(format: ExportFormat): string {
    const items = this._items();

    switch (format) {
      case 'markdown':
        return this.formatMarkdown(items);
      case 'plain':
        return this.formatPlainText(items);
      case 'json':
        return this.formatJson(items);
      case 'html':
        return this.formatHtml(items);
      case 'jira':
        return this.formatJira(items);
      default:
        return this.formatMarkdown(items);
    }
  }

  /**
   * Copy to clipboard with the browser Clipboard API.
   */
  async copyToClipboard(format: ExportFormat): Promise<boolean> {
    const content = this.export(format);
    try {
      await navigator.clipboard.writeText(content);
      return true;
    } catch {
      return false;
    }
  }

  // Format methods
  private formatMarkdown(items: PinnedItem[]): string {
    const lines: string[] = [
      '## Pinned Evidence',
      '',
      `Generated: ${new Date().toISOString()}`,
      '',
      '---',
      ''
    ];

    for (const item of items) {
      lines.push(`### ${item.title}`);
      lines.push('');
      lines.push(`**Type:** ${item.type}`);
      lines.push(`**Context:** ${item.sourceContext}`);
      if (item.cgsHash) {
        lines.push(`**CGS Hash:** \`${item.cgsHash}\``);
      }
      lines.push('');
      lines.push(item.content);
      if (item.notes) {
        lines.push('');
        lines.push(`> **Notes:** ${item.notes}`);
      }
      lines.push('');
      lines.push('---');
      lines.push('');
    }

    return lines.join('\n');
  }

  private formatPlainText(items: PinnedItem[]): string {
    return items.map(item => [
      `[${item.type.toUpperCase()}] ${item.title}`,
      `Context: ${item.sourceContext}`,
      item.cgsHash ? `CGS: ${item.cgsHash}` : null,
      '',
      item.content,
      item.notes ? `Notes: ${item.notes}` : null,
      '',
      '---'
    ].filter(Boolean).join('\n')).join('\n\n');
  }

  private formatJson(items: PinnedItem[]): string {
    return JSON.stringify({
      generated: new Date().toISOString(),
      count: items.length,
      items: items.map(item => ({
        type: item.type,
        title: item.title,
        sourceContext: item.sourceContext,
        content: item.content,
        cgsHash: item.cgsHash,
        notes: item.notes,
        data: item.data
      }))
    }, null, 2);
  }

  private formatHtml(items: PinnedItem[]): string {
    const itemsHtml = items.map(item => `
      <div class="pinned-item">
        <h3>${this.escapeHtml(item.title)}</h3>
        <p><strong>Type:</strong> ${item.type}</p>
        <p><strong>Context:</strong> ${this.escapeHtml(item.sourceContext)}</p>
        ${item.cgsHash ? `<p><strong>CGS:</strong> <code>${item.cgsHash}</code></p>` : ''}
        <div class="content">${this.escapeHtml(item.content)}</div>
        ${item.notes ? `<blockquote>${this.escapeHtml(item.notes)}</blockquote>` : ''}
      </div>
    `).join('\n');

    return `
<!DOCTYPE html>
<html>
<head><title>Pinned Evidence</title></head>
<body>
  <h1>Pinned Evidence</h1>
  <p>Generated: ${new Date().toISOString()}</p>
  <hr>
  ${itemsHtml}
</body>
</html>`;
  }

  private formatJira(items: PinnedItem[]): string {
    // Jira wiki markup
    return items.map(item => [
      `h3. ${item.title}`,
      `*Type:* ${item.type}`,
      `*Context:* ${item.sourceContext}`,
      item.cgsHash ? `*CGS:* {{${item.cgsHash}}}` : null,
      '',
      '{panel}',
      item.content,
      '{panel}',
      item.notes ? `{quote}${item.notes}{quote}` : null,
      '',
      '----'
    ].filter(Boolean).join('\n')).join('\n\n');
  }

  // HTML-escape user-sourced strings so pasted content cannot inject markup.
  private escapeHtml(text: string): string {
    return text
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;');
  }

  // Session persistence
  private loadFromSession(): PinnedItem[] {
    try {
      const stored = sessionStorage.getItem(this.STORAGE_KEY);
      if (stored) {
        const items = JSON.parse(stored) as PinnedItem[];
        return items.map(i => ({ ...i, pinnedAt: new Date(i.pinnedAt) }));
      }
    } catch {
      // Ignore parse errors
    }
    return [];
  }

  private saveToSession(): void {
    sessionStorage.setItem(this.STORAGE_KEY, JSON.stringify(this._items()));
  }
}
```
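
For reference, pinning the VEX consensus step from the mockups might look like this (illustrative values taken from the UI mockups; not a prescribed call site):

```typescript
// Illustrative usage only.
import { inject } from '@angular/core';
import { PinnedExplanationService } from './pinned-explanation.service';

export function pinConsensusStepExample(): void {
  const pins = inject(PinnedExplanationService); // call within an injection context
  pins.pin({
    type: 'explainer-step',
    sourceContext: 'CVE-2024-1234',
    title: 'VEX Consensus Step',
    content: 'Computed consensus using WeightedVote algorithm\nResult: not_affected (confidence: 0.82)',
    cgsHash: 'sha256:abc123'
  });
}
```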

---

## UI Mockup

```
┌─────────────────────────────────────────┐
│ Pinned Evidence (3) [Clear All] │
├─────────────────────────────────────────┤
│ │
│ ┌─────────────────────────────────────┐│
│ │ 📝 VEX Consensus Step [✕] ││
│ │ Context: CVE-2024-1234 ││
│ │ ─────────────────────────────────── ││
│ │ Result: not_affected (0.82) ││
│ │ ││
│ │ [Add Notes] ││
│ └─────────────────────────────────────┘│
│ │
│ ┌─────────────────────────────────────┐│
│ │ 📦 lodash Upgrade [✕] ││
│ │ Context: v1.1 → v1.2 ││
│ │ ─────────────────────────────────── ││
│ │ 4.17.20 → 4.17.21 ││
│ │ Resolved: CVE-2024-9999 ││
│ │ ││
│ │ Notes: "Upgrade approved in PR #42" ││
│ └─────────────────────────────────────┘│
│ │
│ ┌─────────────────────────────────────┐│
│ │ ✓ Final Verdict [✕] ││
│ │ Context: registry/app:v1.2 ││
│ │ ─────────────────────────────────── ││
│ │ NOT_AFFECTED (87% confidence) ││
│ │ CGS: sha256:abc123... ││
│ └─────────────────────────────────────┘│
│ │
├─────────────────────────────────────────┤
│ Format: [Markdown ▼] │
│ │
│ [Copy to Clipboard] [Download] │
└─────────────────────────────────────────┘
```

---

## Pin Button Integration

### In Explainer Timeline (FE_005)

```html
<!-- Add to the explainer-step.component.ts template -->
<button
  class="pin-btn"
  [class.pinned]="isPinned"
  (click)="onPin($event)"
  [attr.aria-label]="isPinned ? 'Unpin this step' : 'Pin this step'">
  {{ isPinned ? '📌' : '📍' }}
</button>
```

### In Diff Table (FE_006)

```html
<!-- Add to the row actions column -->
<button
  class="pin-btn"
  [class.pinned]="isPinned(row.id)"
  (click)="togglePin(row, $event)">
  📍
</button>
```
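
The snippets above assume component-side handlers roughly like the following. Only the method names come from the templates; the pin-key convention and file placement are assumptions:

```typescript
// Sketch of the diff-table side (hypothetical; assumes this file sits in the
// diff-table directory). stopPropagation keeps the pin click from also
// toggling row expansion.
import { Component, inject } from '@angular/core';
import { PinnedExplanationService } from '../../../../core/services/pinned-explanation.service';
import { DiffTableRow } from './models/diff-table.models';

@Component({ selector: 'app-pin-aware-diff-table', standalone: true, template: '' })
export class PinAwareDiffTableComponent {
  private readonly pins = inject(PinnedExplanationService);

  isPinned(rowId: string): boolean {
    // Rows use the PURL as their id, so sourceContext doubles as the pin key.
    return this.pins.items().some(i => i.sourceContext === rowId);
  }

  togglePin(row: DiffTableRow, event: Event): void {
    event.stopPropagation();
    const existing = this.pins.items().find(i => i.sourceContext === row.id);
    if (existing) {
      this.pins.unpin(existing.id);
    } else {
      this.pins.pin({
        type: 'component-change',
        sourceContext: row.id,
        title: `${row.name}: ${row.changeType}`,
        content: `${row.previousVersion ?? '—'} → ${row.currentVersion ?? '—'}`
      });
    }
  }
}
```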

---

## Component Implementation

### PinnedPanelComponent

```typescript
// pinned-panel.component.ts
import { Component, inject, signal } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { trigger, state, style, transition, animate } from '@angular/animations';
import { PinnedExplanationService } from '../../../../../core/services/pinned-explanation.service';
import { PinnedItemComponent } from '../pinned-item/pinned-item.component';
import { FormatSelectorComponent } from '../format-selector/format-selector.component';
import { ExportFormat } from '../models/pinned.models';

@Component({
  selector: 'app-pinned-panel',
  standalone: true,
  imports: [CommonModule, FormsModule, PinnedItemComponent, FormatSelectorComponent],
  template: `
    <div class="pinned-panel" [class.open]="isOpen()" [@slideIn]>
      <div class="panel-header">
        <span class="panel-title">
          Pinned Evidence ({{ service.count() }})
        </span>
        <div class="panel-actions">
          @if (!service.isEmpty()) {
            <button class="btn-clear" (click)="confirmClear()">Clear All</button>
          }
          <button class="btn-toggle" (click)="toggle()">
            {{ isOpen() ? '▼' : '▲' }}
          </button>
        </div>
      </div>

      @if (isOpen()) {
        <div class="panel-body">
          @if (service.isEmpty()) {
            <div class="empty-state">
              <span class="empty-icon">📌</span>
              <p>No pinned items yet.</p>
              <p class="hint">Click the pin icon on any explanation or component to save it here.</p>
            </div>
          } @else {
            <div class="pinned-items">
              @for (item of service.items(); track item.id) {
                <app-pinned-item
                  [item]="item"
                  (unpin)="service.unpin(item.id)"
                  (notesChange)="service.updateNotes(item.id, $event)"
                />
              }
            </div>
          }
        </div>

        @if (!service.isEmpty()) {
          <div class="panel-footer">
            <app-format-selector
              [selected]="selectedFormat()"
              (change)="selectedFormat.set($event)"
            />

            <div class="export-actions">
              <button class="btn-copy" (click)="copyToClipboard()">
                Copy to Clipboard
              </button>
              <button class="btn-download" (click)="download()">
                Download
              </button>
            </div>
          </div>
        }
      }

      @if (showToast()) {
        <div class="toast" [@fadeInOut]>{{ toastMessage() }}</div>
      }
    </div>
  `,
  animations: [
    trigger('slideIn', [
      state('void', style({ transform: 'translateY(100%)' })),
      state('*', style({ transform: 'translateY(0)' })),
      transition('void <=> *', animate('200ms ease-out'))
    ]),
    trigger('fadeInOut', [
      state('void', style({ opacity: 0 })),
      state('*', style({ opacity: 1 })),
      transition('void <=> *', animate('150ms'))
    ])
  ],
  styleUrl: './pinned-panel.component.scss'
})
export class PinnedPanelComponent {
  readonly service = inject(PinnedExplanationService);

  readonly isOpen = signal(false);
  readonly selectedFormat = signal<ExportFormat>('markdown');
  readonly showToast = signal(false);
  readonly toastMessage = signal('');

  toggle(): void {
    this.isOpen.update(v => !v);
  }

  confirmClear(): void {
    if (confirm('Clear all pinned items?')) {
      this.service.clearAll();
      this.showToastMessage('All items cleared');
    }
  }

  async copyToClipboard(): Promise<void> {
    const success = await this.service.copyToClipboard(this.selectedFormat());
    this.showToastMessage(success ? 'Copied to clipboard!' : 'Copy failed');
  }

  download(): void {
    const format = this.selectedFormat();
    const content = this.service.export(format);
    const ext = format === 'json' ? 'json' : format === 'html' ? 'html' : 'md';
    const blob = new Blob([content], { type: 'text/plain' });
    const url = URL.createObjectURL(blob);

    const a = document.createElement('a');
    a.href = url;
    a.download = `pinned-evidence-${Date.now()}.${ext}`;
    a.click();

    URL.revokeObjectURL(url);
    this.showToastMessage('Downloaded!');
  }

  private showToastMessage(message: string): void {
    this.toastMessage.set(message);
    this.showToast.set(true);
    setTimeout(() => this.showToast.set(false), 2000);
  }
}
```

---

## Styling (SCSS)

```scss
// pinned-panel.component.scss
:host {
  position: fixed;
  bottom: 0;
  right: 24px;
  width: 380px;
  z-index: 900;
}

.pinned-panel {
  background: var(--bg-primary);
  border: 1px solid var(--border-color);
  border-bottom: none;
  border-radius: 8px 8px 0 0;
  box-shadow: 0 -4px 20px rgba(0, 0, 0, 0.15);
}

.panel-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  padding: 12px 16px;
  background: var(--bg-secondary);
  border-radius: 8px 8px 0 0;
  cursor: pointer;
}

.panel-title {
  font-weight: 600;
  font-size: 14px;
}

.panel-actions {
  display: flex;
  gap: 8px;
}

.btn-clear {
  padding: 4px 8px;
  font-size: 12px;
  color: var(--text-secondary);
  background: none;
  border: 1px solid var(--border-color);
  border-radius: 4px;
  cursor: pointer;

  &:hover {
    background: var(--bg-hover);
  }
}

.btn-toggle {
  background: none;
  border: none;
  cursor: pointer;
  font-size: 12px;
}

.panel-body {
  max-height: 400px;
  overflow-y: auto;
  padding: 16px;
}

.empty-state {
  text-align: center;
  padding: 24px;
  color: var(--text-secondary);

  .empty-icon {
    font-size: 32px;
  }

  .hint {
    font-size: 12px;
    margin-top: 8px;
  }
}

.pinned-items {
  display: flex;
  flex-direction: column;
  gap: 12px;
}

.panel-footer {
  padding: 12px 16px;
  border-top: 1px solid var(--border-color);
  display: flex;
  justify-content: space-between;
  align-items: center;
}

.export-actions {
  display: flex;
  gap: 8px;
}

.btn-copy,
.btn-download {
  padding: 6px 12px;
  font-size: 13px;
  border-radius: 4px;
  cursor: pointer;
}

.btn-copy {
  background: var(--accent-color);
  color: white;
  border: none;

  &:hover {
    filter: brightness(1.1);
  }
}

.btn-download {
  background: none;
  border: 1px solid var(--border-color);

  &:hover {
    background: var(--bg-hover);
  }
}

.toast {
  position: absolute;
  bottom: 60px;
  left: 50%;
  transform: translateX(-50%);
  padding: 8px 16px;
  background: #333;
  color: white;
  border-radius: 4px;
  font-size: 13px;
}

// Dark mode
:host-context(.dark-mode) {
  .pinned-panel {
    background: var(--bg-primary-dark);
    border-color: var(--border-color-dark);
  }

  .panel-header {
    background: var(--bg-secondary-dark);
  }
}
```

---

## Success Criteria

- [ ] Pin button appears on explainer steps and diff table rows
- [ ] Pinned items display in floating panel
- [ ] Count badge shows number of pinned items
- [ ] Unpin removes item from list
- [ ] Notes can be added to pinned items
- [ ] Format selector offers: Markdown, Plain Text, JSON, HTML, Jira
- [ ] Copy to Clipboard works with all formats
- [ ] Download generates correct file type
- [ ] Pins persist across page navigation (session)
- [ ] Clear All requires confirmation
- [ ] Toast notifications confirm actions
- [ ] Dark mode styling works correctly
- [ ] Unit tests achieve ≥80% coverage

---

## Decisions & Risks

| ID | Decision/Risk | Status | Resolution |
|----|---------------|--------|------------|
| DR-001 | Persistence: sessionStorage vs localStorage | RESOLVED | sessionStorage (clear on tab close) |
| DR-002 | Panel position: bottom-right fixed? | RESOLVED | Yes, floating panel |
| DR-003 | Format support: which targets? | RESOLVED | Markdown, Plain, JSON, HTML, Jira |
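
With DR-001 resolved to sessionStorage, persistence can live entirely inside `PinnedExplanationService`. Below is a minimal sketch of the storage wiring; the `PinnedItem` shape, class name, and storage key are illustrative assumptions rather than the service's final API:

```typescript
// Hypothetical persistence sketch (names and storage key are illustrative).
import { Injectable, effect, signal } from '@angular/core';

interface PinnedItem {
  id: string;
  notes?: string;
}

const STORAGE_KEY = 'stellaops.pinned-evidence'; // assumed key

@Injectable({ providedIn: 'root' })
export class PinnedPersistenceSketch {
  // Seed from sessionStorage so pins survive in-tab navigation (DR-001).
  readonly items = signal<PinnedItem[]>(
    JSON.parse(sessionStorage.getItem(STORAGE_KEY) ?? '[]')
  );

  constructor() {
    // Write back on every change; sessionStorage clears when the tab closes.
    effect(() => {
      sessionStorage.setItem(STORAGE_KEY, JSON.stringify(this.items()));
    });
  }
}
```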

---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Detailed implementation spec |

@@ -0,0 +1,701 @@
# SPRINT_20251229_001_008_FE_reachability_gate_diff

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 008 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Reachability Gate Diff Visualization |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/components/reachability-diff/` |
| **Status** | TODO |
| **Priority** | P1 - UX Enhancement |
| **Estimated Effort** | 3-4 days |
| **Dependencies** | BE ReachGraph API |

---

## Context

Reachability analysis determines whether a vulnerable function can actually be called at runtime. Gates (auth checks, feature flags, config guards) can block execution paths, making vulnerabilities unexploitable even when present.

The Reachability Gate Diff shows:
1. **Path counts** - How many call paths exist to the vulnerable code
2. **Gate changes** - Auth, feature flag, config, or runtime gates added/removed
3. **Confidence scores** - How certain we are about reachability status
4. **Visual diff** - Before/after comparison between lineage nodes

An existing `reachability-diff-view.component.ts` provides basic functionality, but needs enhancement for gate visualization.

---

## Related Documentation

- `docs/modules/reachgraph/architecture.md` (ReachGraph API)
- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Reachability section)
- Existing: `src/app/features/lineage/components/reachability-diff-view/`
- Backend model: `ReachabilityDelta` from lineage.models.ts

---

## Prerequisites

- [ ] Read ReachGraph architecture documentation
- [ ] Review existing `reachability-diff-view.component.ts`
- [ ] Understand `GateChange` model from backend
- [ ] Review graph visualization patterns in codebase

---

## User Stories

| ID | Story | Acceptance Criteria |
|----|-------|---------------------|
| US-001 | As a security engineer, I want to see which gates protect a CVE | Gate icons show gate type and status |
| US-002 | As a developer, I want to understand path changes | Path count comparison shows +/- |
| US-003 | As an auditor, I want confidence levels explained | Confidence bar with factor breakdown |
| US-004 | As a user, I want to expand gate details | Click gate to see description and evidence |

---

## Delivery Tracker

| ID | Task | Status | Est. | Notes |
|----|------|--------|------|-------|
| RD-001 | Enhance `ReachabilityDiffComponent` | TODO | 0.5d | Add gate visualization |
| RD-002 | Create `GateChipComponent` | TODO | 0.5d | Individual gate display |
| RD-003 | Create `PathComparisonComponent` | TODO | 0.5d | Before/after path counts |
| RD-004 | Create `ConfidenceBarComponent` | TODO | 0.5d | Confidence visualization |
| RD-005 | Add gate expansion panel | TODO | 0.5d | Gate details on click |
| RD-006 | Wire to ReachGraph API | TODO | 0.5d | Service integration |
| RD-007 | Add call graph mini-visualization | TODO | 0.5d | Simple path diagram |
| RD-008 | Dark mode styling | TODO | 0.25d | CSS variables |
| RD-009 | Unit tests | TODO | 0.25d | ≥80% coverage |

---

## Component Architecture

```
src/app/features/lineage/components/reachability-diff/
├── reachability-diff.component.ts        # Enhanced main component
├── reachability-diff.component.html
├── reachability-diff.component.scss
├── reachability-diff.component.spec.ts
├── gate-chip/
│   ├── gate-chip.component.ts            # Individual gate badge
│   └── gate-chip.component.scss
├── path-comparison/
│   ├── path-comparison.component.ts      # Path count comparison
│   └── path-comparison.component.scss
├── confidence-bar/
│   ├── confidence-bar.component.ts       # Confidence visualization
│   └── confidence-bar.component.scss
├── call-path-mini/
│   └── call-path-mini.component.ts       # Mini call graph
└── models/
    └── reachability-diff.models.ts       # Local interfaces
```

---

## Data Models

```typescript
// reachability-diff.models.ts

/**
 * Reachability delta from backend (extended).
 */
export interface ReachabilityDeltaDisplay {
  /** CVE identifier */
  cve: string;

  /** Component PURL */
  purl: string;

  /** Previous reachability status */
  previousReachable: boolean | null;

  /** Current reachability status */
  currentReachable: boolean;

  /** Status change type */
  changeType: 'became-reachable' | 'became-unreachable' | 'still-reachable' | 'still-unreachable' | 'unknown';

  /** Previous path count */
  previousPathCount: number;

  /** Current path count */
  currentPathCount: number;

  /** Path count delta */
  pathDelta: number;

  /** Confidence level (0.0 - 1.0) */
  confidence: number;

  /** Confidence factors */
  confidenceFactors?: ConfidenceFactor[];

  /** Gates that affect reachability */
  gates: GateDisplay[];

  /** Gate changes between versions */
  gateChanges: GateChangeDisplay[];

  /** Simplified call path (for visualization) */
  callPath?: CallPathNode[];
}

/**
 * Gate display model.
 */
export interface GateDisplay {
  /** Gate identifier */
  id: string;

  /** Gate type */
  type: GateType;

  /** Gate name/identifier in code */
  name: string;

  /** Human-readable description */
  description: string;

  /** Whether gate is active (blocking) */
  isActive: boolean;

  /** Source file location */
  location?: string;

  /** Configuration value (if config gate) */
  configValue?: string;
}

export type GateType = 'auth' | 'feature-flag' | 'config' | 'runtime' | 'version-check' | 'platform-check';

/**
 * Gate change between versions.
 */
export interface GateChangeDisplay {
  /** Gate that changed */
  gate: GateDisplay;

  /** Type of change */
  changeType: 'added' | 'removed' | 'modified';

  /** Previous state (if modified) */
  previousState?: Partial<GateDisplay>;

  /** Impact on reachability */
  impact: 'blocking' | 'unblocking' | 'neutral';
}

/**
 * Confidence factor for reachability.
 */
export interface ConfidenceFactor {
  name: string;
  value: number;
  weight: number;
  source: string;
}

/**
 * Call path node for visualization.
 */
export interface CallPathNode {
  /** Node ID */
  id: string;

  /** Function/method name */
  name: string;

  /** File location */
  file: string;

  /** Line number */
  line: number;

  /** Node type */
  type: 'entry' | 'intermediate' | 'gate' | 'vulnerable';

  /** Gate at this node (if any) */
  gate?: GateDisplay;

  /** Children in call tree */
  children?: CallPathNode[];
}
```

---

## UI Mockup

```
┌────────────────────────────────────────────────────────────────────────────┐
│ Reachability Changes: v1.1 → v1.2                                          │
│ 3 CVEs with reachability changes                                           │
├────────────────────────────────────────────────────────────────────────────┤
│                                                                            │
│ ┌─────────────────────────────────────────────────────────────────────┐   │
│ │ CVE-2024-1234 in pkg:npm/lodash@4.17.21                             │   │
│ │ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ │   │
│ │                                                                     │   │
│ │ Status: REACHABLE → UNREACHABLE                                     │   │
│ │ ┌────────────────────────────────────────────────────────┐         │   │
│ │ │ ████████████████████████░░░░░░░░ 75% Confidence        │         │   │
│ │ └────────────────────────────────────────────────────────┘         │   │
│ │                                                                     │   │
│ │ Paths: 3 → 0 (−3)                                                   │   │
│ │                                                                     │   │
│ │ ┌─ Gates ────────────────────────────────────────────────────────┐ │   │
│ │ │ + [🔐 auth] requireAdmin()              Added - BLOCKING       │ │   │
│ │ │   Location: src/middleware/auth.ts:42                          │ │   │
│ │ │   "Requires admin role before template processing"             │ │   │
│ │ │                                                                │ │   │
│ │ │ + [🚩 flag] ENABLE_TEMPLATES            Added - BLOCKING       │ │   │
│ │ │   Config: process.env.ENABLE_TEMPLATES = false                 │ │   │
│ │ │   "Feature flag disables template engine"                      │ │   │
│ │ │                                                                │ │   │
│ │ │ ~ [⚙️ config] MAX_TEMPLATE_SIZE         Modified               │ │   │
│ │ │   Previous: 1MB | Current: 100KB                               │ │   │
│ │ │   Impact: Neutral (doesn't affect reachability)                │ │   │
│ │ └────────────────────────────────────────────────────────────────┘ │   │
│ │                                                                     │   │
│ │ ┌─ Call Path (Simplified) ───────────────────────────────────────┐ │   │
│ │ │                                                                │ │   │
│ │ │ [main.ts:1] ──▶ [server.ts:15] ──▶ [🔐 auth.ts:42] ──✗         │ │   │
│ │ │                      │                                         │ │   │
│ │ │                      └──▶ [🚩 config.ts:8] ──✗                 │ │   │
│ │ │                             │                                  │ │   │
│ │ │                             └──▶ [lodash:vuln]                 │ │   │
│ │ │                                    ↑ BLOCKED                   │ │   │
│ │ └────────────────────────────────────────────────────────────────┘ │   │
│ │                                                                     │   │
│ │ [Expand Details]  [Pin]  [View Full Graph]                          │   │
│ └─────────────────────────────────────────────────────────────────────┘   │
│                                                                            │
│ ┌─────────────────────────────────────────────────────────────────────┐   │
│ │ CVE-2024-5678 in pkg:npm/express@4.18.2                             │   │
│ │ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ │   │
│ │                                                                     │   │
│ │ Status: UNREACHABLE → REACHABLE ⚠️                                  │   │
│ │ ┌────────────────────────────────────────────────────────┐         │   │
│ │ │ ████████████████████████████████ 90% Confidence        │         │   │
│ │ └────────────────────────────────────────────────────────┘         │   │
│ │                                                                     │   │
│ │ Paths: 0 → 2 (+2)                                                   │   │
│ │                                                                     │   │
│ │ ┌─ Gates ────────────────────────────────────────────────────────┐ │   │
│ │ │ − [🚩 flag] DISABLE_JSON_PARSING        Removed - UNBLOCKING   │ │   │
│ │ │   "Feature flag that disabled JSON parsing was removed"        │ │   │
│ │ └────────────────────────────────────────────────────────────────┘ │   │
│ │                                                                     │   │
│ │ [Expand Details]  [Pin]  [View Full Graph]                          │   │
│ └─────────────────────────────────────────────────────────────────────┘   │
│                                                                            │
└────────────────────────────────────────────────────────────────────────────┘
```

---

## Component Implementation

### GateChipComponent

```typescript
// gate-chip.component.ts
import { Component, Input } from '@angular/core';
import { CommonModule } from '@angular/common';
import { GateDisplay, GateType } from '../models/reachability-diff.models';

@Component({
  selector: 'app-gate-chip',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="gate-chip"
         [class]="gateTypeClass"
         [class.added]="changeType === 'added'"
         [class.removed]="changeType === 'removed'"
         [class.blocking]="gate.isActive">
      <span class="gate-icon">{{ gateIcon }}</span>
      <span class="gate-type">{{ gate.type }}</span>
      <span class="gate-name">{{ gate.name }}</span>
      @if (changeType) {
        <span class="change-indicator">
          {{ changeType === 'added' ? '+' : changeType === 'removed' ? '−' : '~' }}
        </span>
      }
      @if (impactLabel) {
        <span class="impact-badge" [class]="impactClass">{{ impactLabel }}</span>
      }
    </div>
  `,
  styles: [`
    .gate-chip {
      display: inline-flex;
      align-items: center;
      gap: 6px;
      padding: 4px 10px;
      border-radius: 16px;
      font-size: 12px;
      background: var(--bg-secondary);
      border: 1px solid var(--border-color);
    }

    .gate-chip.auth { border-left: 3px solid #6366f1; }
    .gate-chip.feature-flag { border-left: 3px solid #f59e0b; }
    .gate-chip.config { border-left: 3px solid #8b5cf6; }
    .gate-chip.runtime { border-left: 3px solid #ec4899; }

    .gate-chip.added { background: var(--color-success-light); }
    .gate-chip.removed { background: var(--color-danger-light); }

    .gate-icon { font-size: 14px; }
    .gate-type {
      font-weight: 600;
      text-transform: uppercase;
      font-size: 10px;
      color: var(--text-secondary);
    }
    .gate-name { font-family: monospace; }

    .change-indicator {
      font-weight: bold;
      margin-left: 4px;
    }

    .impact-badge {
      padding: 2px 6px;
      border-radius: 10px;
      font-size: 10px;
      font-weight: 600;
      text-transform: uppercase;
    }

    .impact-badge.blocking {
      background: var(--color-success);
      color: white;
    }

    .impact-badge.unblocking {
      background: var(--color-danger);
      color: white;
    }
  `]
})
export class GateChipComponent {
  @Input({ required: true }) gate!: GateDisplay;
  @Input() changeType?: 'added' | 'removed' | 'modified';
  @Input() impact?: 'blocking' | 'unblocking' | 'neutral';

  get gateIcon(): string {
    const icons: Record<GateType, string> = {
      'auth': '🔐',
      'feature-flag': '🚩',
      'config': '⚙️',
      'runtime': '⏱️',
      'version-check': '🏷️',
      'platform-check': '💻'
    };
    return icons[this.gate.type] || '🔒';
  }

  get gateTypeClass(): string {
    return this.gate.type;
  }

  get impactLabel(): string {
    if (!this.impact || this.impact === 'neutral') return '';
    return this.impact === 'blocking' ? 'BLOCKING' : 'UNBLOCKING';
  }

  get impactClass(): string {
    return this.impact || 'neutral';
  }
}
```
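
For orientation, the enhanced `ReachabilityDiffComponent` (RD-001, not reproduced here) would render one chip per gate change roughly as below. This is a sketch only; the `delta` input and the component name are assumptions:

```typescript
// Sketch of a parent rendering gate changes (the `delta` input is assumed).
import { Component, Input } from '@angular/core';
import { GateChipComponent } from './gate-chip/gate-chip.component';
import { ReachabilityDeltaDisplay } from './models/reachability-diff.models';

@Component({
  selector: 'app-gate-change-list',
  standalone: true,
  imports: [GateChipComponent],
  template: `
    @for (change of delta.gateChanges; track change.gate.id) {
      <app-gate-chip
        [gate]="change.gate"
        [changeType]="change.changeType"
        [impact]="change.impact"
      />
    }
  `
})
export class GateChangeListComponent {
  @Input({ required: true }) delta!: ReachabilityDeltaDisplay;
}
```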

### ConfidenceBarComponent

```typescript
// confidence-bar.component.ts
import { Component, Input } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ConfidenceFactor } from '../models/reachability-diff.models';

@Component({
  selector: 'app-confidence-bar',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="confidence-container">
      <div class="confidence-bar">
        <div class="confidence-fill"
             [style.width.%]="confidence * 100"
             [class]="confidenceClass">
        </div>
      </div>
      <span class="confidence-label">
        {{ (confidence * 100).toFixed(0) }}% Confidence
      </span>

      @if (showFactors && factors?.length) {
        <div class="factors-breakdown">
          @for (factor of factors; track factor.name) {
            <div class="factor-row">
              <span class="factor-name">{{ factor.name }}</span>
              <div class="factor-bar">
                <div class="factor-fill" [style.width.%]="factor.value * 100"></div>
              </div>
              <span class="factor-value">{{ (factor.value * 100).toFixed(0) }}%</span>
            </div>
          }
        </div>
      }
    </div>
  `,
  styles: [`
    .confidence-container {
      display: flex;
      flex-direction: column;
      gap: 8px;
    }

    .confidence-bar {
      height: 8px;
      background: var(--bg-secondary);
      border-radius: 4px;
      overflow: hidden;
    }

    .confidence-fill {
      height: 100%;
      border-radius: 4px;
      transition: width 0.3s ease;
    }

    .confidence-fill.high { background: var(--color-success); }
    .confidence-fill.medium { background: var(--color-warning); }
    .confidence-fill.low { background: var(--color-danger); }

    .confidence-label {
      font-size: 13px;
      font-weight: 500;
    }

    .factors-breakdown {
      margin-top: 8px;
      padding: 8px;
      background: var(--bg-secondary);
      border-radius: 6px;
    }

    .factor-row {
      display: flex;
      align-items: center;
      gap: 8px;
      margin-bottom: 4px;
    }

    .factor-name {
      flex: 0 0 100px;
      font-size: 11px;
      color: var(--text-secondary);
    }

    .factor-bar {
      flex: 1;
      height: 4px;
      background: var(--bg-tertiary);
      border-radius: 2px;
    }

    .factor-fill {
      height: 100%;
      background: var(--accent-color);
      border-radius: 2px;
    }

    .factor-value {
      flex: 0 0 40px;
      font-size: 11px;
      text-align: right;
    }
  `]
})
export class ConfidenceBarComponent {
  @Input({ required: true }) confidence!: number;
  @Input() factors?: ConfidenceFactor[];
  @Input() showFactors = false;

  get confidenceClass(): string {
    if (this.confidence >= 0.7) return 'high';
    if (this.confidence >= 0.4) return 'medium';
    return 'low';
  }
}
```
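
Wired into a host view, the bar takes the delta's `confidence` plus the optional factor breakdown. A usage sketch follows; the factor values and sources are invented sample data:

```typescript
// Demo host component (factor data is invented for illustration).
import { Component } from '@angular/core';
import { ConfidenceBarComponent } from './confidence-bar/confidence-bar.component';
import { ConfidenceFactor } from './models/reachability-diff.models';

@Component({
  selector: 'app-confidence-demo',
  standalone: true,
  imports: [ConfidenceBarComponent],
  template: `
    <app-confidence-bar
      [confidence]="0.75"
      [factors]="factors"
      [showFactors]="true"
    />
  `
})
export class ConfidenceDemoComponent {
  readonly factors: ConfidenceFactor[] = [
    { name: 'Static paths', value: 0.9, weight: 0.5, source: 'reachgraph' },
    { name: 'Gate evidence', value: 0.7, weight: 0.3, source: 'scanner' },
    { name: 'Runtime hints', value: 0.4, weight: 0.2, source: 'runtime' }
  ];
}
```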

### CallPathMiniComponent

```typescript
// call-path-mini.component.ts
import { Component, Input } from '@angular/core';
import { CommonModule } from '@angular/common';
import { CallPathNode } from '../models/reachability-diff.models';

@Component({
  selector: 'app-call-path-mini',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="call-path-mini">
      @for (node of flattenedPath; track node.id; let last = $last) {
        <div class="path-node" [class]="node.type">
          @if (node.gate) {
            <span class="gate-icon">{{ getGateIcon(node.gate.type) }}</span>
          }
          <span class="node-name">{{ node.name }}</span>
          @if (node.type === 'gate' && node.gate?.isActive) {
            <span class="blocked-indicator">✗</span>
          }
        </div>
        @if (!last) {
          <span class="path-arrow">──▶</span>
        }
      }
      @if (isBlocked) {
        <div class="blocked-label">BLOCKED</div>
      }
    </div>
  `,
  styles: [`
    .call-path-mini {
      display: flex;
      align-items: center;
      flex-wrap: wrap;
      gap: 4px;
      padding: 12px;
      background: var(--bg-secondary);
      border-radius: 6px;
      font-family: monospace;
      font-size: 12px;
    }

    .path-node {
      display: inline-flex;
      align-items: center;
      gap: 4px;
      padding: 2px 8px;
      background: var(--bg-primary);
      border-radius: 4px;
      border: 1px solid var(--border-color);
    }

    .path-node.entry { border-color: var(--color-info); }
    .path-node.vulnerable {
      border-color: var(--color-danger);
      background: var(--color-danger-light);
    }
    .path-node.gate {
      border-color: var(--color-warning);
    }

    .gate-icon { font-size: 14px; }
    .blocked-indicator {
      color: var(--color-danger);
      font-weight: bold;
    }

    .path-arrow {
      color: var(--text-secondary);
    }

    .blocked-label {
      margin-left: 8px;
      padding: 2px 8px;
      background: var(--color-success);
      color: white;
      border-radius: 4px;
      font-size: 10px;
      font-weight: 600;
    }
  `]
})
export class CallPathMiniComponent {
  @Input() path?: CallPathNode[];

  get flattenedPath(): CallPathNode[] {
    if (!this.path) return [];

    // Flatten the tree to a linear path (simplified for display)
    const result: CallPathNode[] = [];
    const flatten = (node: CallPathNode) => {
      result.push(node);
      if (node.children?.[0]) {
        flatten(node.children[0]); // follow the first child only
      }
    };

    if (this.path[0]) {
      flatten(this.path[0]);
    }

    return result;
  }

  get isBlocked(): boolean {
    return this.flattenedPath.some(n => n.type === 'gate' && n.gate?.isActive);
  }

  getGateIcon(type: string): string {
    const icons: Record<string, string> = {
      'auth': '🔐',
      'feature-flag': '🚩',
      'config': '⚙️',
      'runtime': '⏱️'
    };
    return icons[type] || '🔒';
  }
}
```

---

## Success Criteria

- [ ] Reachability changes display with status arrows (REACHABLE → UNREACHABLE)
- [ ] Path count comparison shows delta (+/-)
- [ ] Confidence bar displays with appropriate coloring
- [ ] Gate chips show type, name, and change indicator
- [ ] Gate expansion reveals description and location
- [ ] Call path mini-visualization shows simplified path
- [ ] Blocked gates show clear visual indicator
- [ ] Pin button integrates with Pinned Explanations
- [ ] Dark mode styling works correctly
- [ ] Accessible: keyboard navigation, screen reader support
- [ ] Unit tests achieve ≥80% coverage

---

## Decisions & Risks

| ID | Decision/Risk | Status | Resolution |
|----|---------------|--------|------------|
| DR-001 | Call path visualization: full graph or simplified? | RESOLVED | Simplified linear path |
| DR-002 | Gate detail expansion: inline or modal? | RESOLVED | Inline accordion |
| DR-003 | Confidence factors: always show or toggleable? | RESOLVED | Toggleable |

---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Detailed implementation spec |

664
docs/implplan/SPRINT_20251229_001_009_FE_audit_pack_export.md
Normal file
@@ -0,0 +1,664 @@
# SPRINT_20251229_001_009_FE_audit_pack_export

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 009 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Audit Pack Export UI |
| **Working Directory** | `src/Web/StellaOps.Web/src/app/features/lineage/components/audit-pack-export/` |
| **Status** | TODO |
| **Priority** | P2 - Compliance Feature |
| **Estimated Effort** | 2-3 days |
| **Dependencies** | BE ExportCenter API |

---

## Context

Audit Pack Export generates compliance-ready evidence bundles for auditors, containing:
- **SBOMs** - All SBOM versions in the lineage selection
- **VEX documents** - All VEX statements affecting selected artifacts
- **Delta attestations** - DSSE-signed verdicts between lineage nodes
- **Proof traces** - Engine decision chains for each verdict
- **Merkle root** - Content-addressable bundle verification

The existing `lineage-export-dialog.component.ts` provides basic export, but needs:
1. Format selection (ZIP, NDJSON, tar.gz)
2. Content options (include/exclude sections)
3. Merkle root display with copy functionality
4. Progress indication for large exports
5. Signing options (keyless/keyed)

---

## Related Documentation

- `docs/modules/exportcenter/architecture.md` (Export API)
- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md` (Audit Pack section)
- Existing: `src/app/features/lineage/components/lineage-export-dialog/`
- Backend model: `LineageEvidencePack` from ExportCenter

---

## Prerequisites

- [ ] Read ExportCenter architecture documentation
- [ ] Review existing `lineage-export-dialog.component.ts`
- [ ] Understand `LineageEvidencePack` model from backend
- [ ] Review modal/dialog patterns in codebase

---

## User Stories

| ID | Story | Acceptance Criteria |
|----|-------|---------------------|
| US-001 | As an auditor, I want to download a complete evidence bundle | Download includes all selected artifacts and proofs |
| US-002 | As a compliance officer, I want to verify bundle integrity | Merkle root displayed and copyable |
| US-003 | As a user, I want to customize export contents | Checkboxes for each section |
| US-004 | As a user, I want format options | ZIP, NDJSON, tar.gz selectable |
| US-005 | As a user, I want to see export progress | Progress bar for large exports |

---

## Delivery Tracker

| ID | Task | Status | Est. | Notes |
|----|------|--------|------|-------|
| AE-001 | Enhance `AuditPackExportComponent` | TODO | 0.5d | Dialog component |
| AE-002 | Create `ExportOptionsComponent` | TODO | 0.5d | Content checkboxes |
| AE-003 | Create `FormatSelectorComponent` | TODO | 0.25d | Format dropdown |
| AE-004 | Create `MerkleDisplayComponent` | TODO | 0.5d | Root hash display |
| AE-005 | Add signing options | TODO | 0.25d | Keyless/keyed toggle |
| AE-006 | Implement progress tracking | TODO | 0.5d | Progress bar + status |
| AE-007 | Wire to ExportCenter API | TODO | 0.25d | Service integration |
| AE-008 | Add download handling | TODO | 0.25d | Blob download |
| AE-009 | Dark mode styling | TODO | 0.25d | CSS variables |
| AE-010 | Unit tests | TODO | 0.25d | ≥80% coverage |

---

## Component Architecture

```
src/app/features/lineage/components/audit-pack-export/
├── audit-pack-export.component.ts        # Dialog container
├── audit-pack-export.component.html
├── audit-pack-export.component.scss
├── audit-pack-export.component.spec.ts
├── export-options/
│   ├── export-options.component.ts       # Content selection
│   └── export-options.component.scss
├── format-selector/
│   └── format-selector.component.ts      # Format dropdown
├── merkle-display/
│   ├── merkle-display.component.ts       # Hash display
│   └── merkle-display.component.scss
├── signing-options/
│   └── signing-options.component.ts      # Signing toggle
└── models/
    └── audit-pack.models.ts              # Local interfaces
```

---

## Data Models

```typescript
// audit-pack.models.ts

/**
 * Audit pack export request.
 */
export interface AuditPackExportRequest {
  /** Artifact digests to include */
  artifactDigests: string[];

  /** Tenant ID */
  tenantId: string;

  /** Export format */
  format: ExportFormat;

  /** Content options */
  options: ExportOptions;

  /** Signing configuration */
  signing: SigningOptions;
}

/**
 * Export format options.
 */
export type ExportFormat = 'zip' | 'ndjson' | 'tar.gz';

/**
 * Content inclusion options.
 */
export interface ExportOptions {
  /** Include SBOM documents */
  includeSboms: boolean;

  /** Include VEX documents */
  includeVex: boolean;

  /** Include delta attestations */
  includeAttestations: boolean;

  /** Include proof traces */
  includeProofTraces: boolean;

  /** Include reachability data */
  includeReachability: boolean;

  /** Include policy evaluation logs */
  includePolicyLogs: boolean;

  /** SBOM format (if including SBOMs) */
  sbomFormat: 'cyclonedx' | 'spdx' | 'both';

  /** VEX format (if including VEX) */
  vexFormat: 'openvex' | 'csaf' | 'both';
}

/**
 * Signing options for export.
 */
export interface SigningOptions {
  /** Sign the bundle */
  signBundle: boolean;

  /** Use keyless signing (Sigstore) */
  useKeyless: boolean;

  /** Log to transparency log (Rekor) */
  useTransparencyLog: boolean;

  /** Key ID (if not keyless) */
  keyId?: string;
}

/**
 * Export response from API.
 */
export interface AuditPackExportResponse {
  /** Bundle identifier */
  bundleId: string;

  /** Merkle root of the bundle */
  merkleRoot: string;

  /** Bundle digest */
  bundleDigest: string;

  /** Download URL (signed, time-limited) */
  downloadUrl: string;

  /** Bundle size in bytes */
  sizeBytes: number;

  /** Content summary */
  summary: ExportSummary;

  /** Attestation info (if signed) */
  attestation?: AttestationInfo;
}

/**
 * Summary of exported content.
 */
export interface ExportSummary {
  sbomCount: number;
  vexCount: number;
  attestationCount: number;
  proofTraceCount: number;
  artifactCount: number;
}

/**
 * Attestation information.
 */
export interface AttestationInfo {
  digest: string;
  rekorIndex?: number;
  rekorLogId?: string;
  issuer?: string;
}

/**
 * Export progress state.
 */
export interface ExportProgress {
  state: 'idle' | 'preparing' | 'generating' | 'signing' | 'complete' | 'error';
  percent: number;
  message: string;
  error?: string;
}
```
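
As a concrete illustration, a request covering the three-artifact lineage from the mockup below might look like this (the digests, tenant ID, and import path are placeholder values):

```typescript
// Example request payload (placeholder digests and tenant).
import { AuditPackExportRequest } from './models/audit-pack.models';

const request: AuditPackExportRequest = {
  artifactDigests: [
    'sha256:aaa...', // registry/app:v1.0
    'sha256:bbb...', // registry/app:v1.1
    'sha256:ccc...'  // registry/app:v1.2
  ],
  tenantId: 'tenant-001',
  format: 'zip',
  options: {
    includeSboms: true,
    includeVex: true,
    includeAttestations: true,
    includeProofTraces: true,
    includeReachability: false,
    includePolicyLogs: false,
    sbomFormat: 'cyclonedx',
    vexFormat: 'openvex'
  },
  signing: { signBundle: true, useKeyless: true, useTransparencyLog: true }
};
```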

---

## UI Mockup

```
┌──────────────────────────────────────────────────────────────────────────┐
│ Export Audit Pack                                                    [✕] │
├──────────────────────────────────────────────────────────────────────────┤
│                                                                          │
│ Exporting evidence for 3 artifacts in lineage                            │
│ registry/app:v1.0 → v1.1 → v1.2                                          │
│                                                                          │
│ ┌─ Content Options ───────────────────────────────────────────────────┐ │
│ │                                                                     │ │
│ │ [✓] SBOMs                                  Format: [CycloneDX ▼]    │ │
│ │     SBOM documents for each artifact version                        │ │
│ │                                                                     │ │
│ │ [✓] VEX Documents                          Format: [OpenVEX ▼]      │ │
│ │     Vulnerability Exploitability eXchange statements                │ │
│ │                                                                     │ │
│ │ [✓] Delta Attestations                                              │ │
│ │     DSSE-signed verdicts between versions                           │ │
│ │                                                                     │ │
│ │ [✓] Proof Traces                                                    │ │
│ │     Engine decision chains for each verdict                         │ │
│ │                                                                     │ │
│ │ [ ] Reachability Data                                               │ │
│ │     Call graph analysis results                                     │ │
│ │                                                                     │ │
│ │ [ ] Policy Evaluation Logs                                          │ │
│ │     Detailed policy rule match logs                                 │ │
│ │                                                                     │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│                                                                          │
│ ┌─ Format ────────────────────────────────────────────────────────────┐ │
│ │                                                                     │ │
│ │ (●) ZIP Archive                                                     │ │
│ │ ( ) NDJSON Stream                                                   │ │
│ │ ( ) tar.gz Archive                                                  │ │
│ │                                                                     │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│                                                                          │
│ ┌─ Signing ───────────────────────────────────────────────────────────┐ │
│ │                                                                     │ │
│ │ [✓] Sign bundle                                                     │ │
│ │                                                                     │ │
│ │ (●) Keyless (Sigstore)                                              │ │
│ │ ( ) Use signing key: [Select key ▼]                                 │ │
│ │                                                                     │ │
│ │ [✓] Log to Rekor transparency log                                   │ │
│ │                                                                     │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│                                                                          │
│ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━   │
│ (After export completes)                                                 │
│                                                                          │
│ ┌─ Export Complete ───────────────────────────────────────────────────┐ │
│ │                                                                     │ │
│ │ ✓ Bundle generated successfully                                     │ │
│ │                                                                     │ │
│ │ Merkle Root:                                                        │ │
│ │ ┌────────────────────────────────────────────────────────────────┐ │ │
│ │ │ sha256:a1b2c3d4e5f6...                              [Copy] 📋  │ │ │
│ │ └────────────────────────────────────────────────────────────────┘ │ │
│ │                                                                     │ │
│ │ Bundle Size: 2.4 MB                                                 │ │
│ │                                                                     │ │
│ │ Contents:                                                           │ │
│ │ • 3 SBOMs (CycloneDX 1.6)                                           │ │
│ │ • 12 VEX documents                                                  │ │
│ │ • 8 attestations                                                    │ │
│ │ • 15 proof traces                                                   │ │
│ │                                                                     │ │
│ │ Attestation:                                                        │ │
│ │ • Rekor Index: 123456789                                            │ │
│ │ • [View on Rekor]                                                   │ │
│ │                                                                     │ │
│ └─────────────────────────────────────────────────────────────────────┘ │
│                                                                          │
├──────────────────────────────────────────────────────────────────────────┤
│                                              [Cancel] [Download Bundle]  │
└──────────────────────────────────────────────────────────────────────────┘
```

---

## Component Implementation

### AuditPackExportComponent

```typescript
// audit-pack-export.component.ts
import {
  Component, Input, Output, EventEmitter,
  signal, computed, inject, ChangeDetectionStrategy
} from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { firstValueFrom } from 'rxjs';
import { ExportOptionsComponent } from './export-options/export-options.component';
import { FormatSelectorComponent } from './format-selector/format-selector.component';
import { SigningOptionsComponent } from './signing-options/signing-options.component';
import { MerkleDisplayComponent } from './merkle-display/merkle-display.component';
import { AuditPackService } from '../../../core/services/audit-pack.service';
import {
  AuditPackExportRequest, AuditPackExportResponse,
  ExportOptions, ExportFormat, SigningOptions, ExportProgress
} from './models/audit-pack.models';

@Component({
  selector: 'app-audit-pack-export',
  standalone: true,
  imports: [
    CommonModule, FormsModule,
    ExportOptionsComponent, FormatSelectorComponent,
    SigningOptionsComponent, MerkleDisplayComponent
  ],
  templateUrl: './audit-pack-export.component.html',
  styleUrl: './audit-pack-export.component.scss',
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class AuditPackExportComponent {
  private readonly service = inject(AuditPackService);

  // Inputs
  @Input() artifactDigests: string[] = [];
  @Input() tenantId = '';
  @Input() artifactLabels: string[] = [];

  // Outputs
  @Output() close = new EventEmitter<void>();
  @Output() exported = new EventEmitter<AuditPackExportResponse>();

  // State
  readonly exportOptions = signal<ExportOptions>({
    includeSboms: true,
    includeVex: true,
    includeAttestations: true,
    includeProofTraces: true,
    includeReachability: false,
    includePolicyLogs: false,
    sbomFormat: 'cyclonedx',
    vexFormat: 'openvex'
  });

  readonly format = signal<ExportFormat>('zip');

  readonly signingOptions = signal<SigningOptions>({
    signBundle: true,
    useKeyless: true,
    useTransparencyLog: true
  });

  readonly progress = signal<ExportProgress>({
    state: 'idle',
    percent: 0,
    message: ''
  });

  readonly result = signal<AuditPackExportResponse | null>(null);

  // Computed
  readonly isExporting = computed(() =>
    ['preparing', 'generating', 'signing'].includes(this.progress().state)
  );

  readonly isComplete = computed(() => this.progress().state === 'complete');
  readonly hasError = computed(() => this.progress().state === 'error');

  readonly canExport = computed(() =>
    this.artifactDigests.length > 0 &&
    !this.isExporting() &&
    this.progress().state !== 'complete'
  );

  // Actions
  async startExport(): Promise<void> {
    this.progress.set({ state: 'preparing', percent: 0, message: 'Preparing export...' });

    const request: AuditPackExportRequest = {
      artifactDigests: this.artifactDigests,
      tenantId: this.tenantId,
      format: this.format(),
      options: this.exportOptions(),
      signing: this.signingOptions()
    };

    try {
      // Simulated progress updates (the real flow would use SSE or polling)
      this.progress.set({ state: 'generating', percent: 30, message: 'Generating bundle...' });
      if (this.signingOptions().signBundle) {
        this.progress.set({ state: 'signing', percent: 70, message: 'Signing bundle...' });
      }

      const response = await firstValueFrom(this.service.exportAuditPack(request));

      this.progress.set({ state: 'complete', percent: 100, message: 'Export complete!' });
      this.result.set(response);
      this.exported.emit(response);
    } catch (error) {
      this.progress.set({
        state: 'error',
        percent: 0,
        message: 'Export failed',
        error: error instanceof Error ? error.message : 'Unknown error'
      });
    }
  }

  downloadBundle(): void {
    const res = this.result();
    if (!res?.downloadUrl) return;

    // Trigger the browser download via the signed URL
    const a = document.createElement('a');
    a.href = res.downloadUrl;
    a.download = `audit-pack-${res.bundleId}.${this.format()}`;
    a.click();
  }

  resetExport(): void {
    this.progress.set({ state: 'idle', percent: 0, message: '' });
    this.result.set(null);
  }

  onOptionsChange(options: ExportOptions): void {
    this.exportOptions.set(options);
  }

  onFormatChange(format: ExportFormat): void {
    this.format.set(format);
  }

  onSigningChange(options: SigningOptions): void {
    this.signingOptions.set(options);
  }

  onClose(): void {
    this.close.emit();
  }
}
```

### MerkleDisplayComponent

```typescript
// merkle-display.component.ts
import { Component, Input, signal } from '@angular/core';
import { CommonModule } from '@angular/common';

@Component({
  selector: 'app-merkle-display',
  standalone: true,
  imports: [CommonModule],
  template: `
    <div class="merkle-display">
      <label class="merkle-label">Merkle Root:</label>
      <div class="merkle-hash-container">
        <code class="merkle-hash">{{ truncatedHash }}</code>
        <button
          class="copy-btn"
          [class.copied]="copied()"
          (click)="copyToClipboard()"
          [attr.aria-label]="copied() ? 'Copied!' : 'Copy hash'">
          {{ copied() ? '✓' : '📋' }}
        </button>
      </div>
      @if (copied()) {
        <span class="copied-toast">Copied to clipboard!</span>
      }
    </div>
  `,
  styles: [`
    .merkle-display {
      display: flex;
      flex-direction: column;
      gap: 4px;
    }

    .merkle-label {
      font-size: 12px;
      font-weight: 600;
      color: var(--text-secondary);
    }

    .merkle-hash-container {
      display: flex;
      align-items: center;
      gap: 8px;
      padding: 8px 12px;
      background: var(--bg-secondary);
      border: 1px solid var(--border-color);
      border-radius: 6px;
    }

    .merkle-hash {
      flex: 1;
      font-family: monospace;
      font-size: 13px;
      word-break: break-all;
    }

    .copy-btn {
      flex-shrink: 0;
      padding: 4px 8px;
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      cursor: pointer;
      font-size: 14px;
    }

    /* Flattened from nested selectors: inline component styles are plain CSS */
    .copy-btn:hover {
      background: var(--bg-hover);
    }

    .copy-btn.copied {
      background: var(--color-success-light);
      border-color: var(--color-success);
      color: var(--color-success);
    }

    .copied-toast {
      font-size: 11px;
      color: var(--color-success);
    }
  `]
})
export class MerkleDisplayComponent {
  @Input({ required: true }) hash!: string;
  @Input() truncate = true;

  readonly copied = signal(false);

  get truncatedHash(): string {
    if (!this.truncate || this.hash.length <= 40) return this.hash;
    return `${this.hash.slice(0, 20)}...${this.hash.slice(-16)}`;
  }

  async copyToClipboard(): Promise<void> {
    try {
      await navigator.clipboard.writeText(this.hash);
      this.copied.set(true);
      setTimeout(() => this.copied.set(false), 2000);
    } catch {
      // Fallback for older browsers
      const textarea = document.createElement('textarea');
      textarea.value = this.hash;
      document.body.appendChild(textarea);
      textarea.select();
      document.execCommand('copy');
      document.body.removeChild(textarea);
      this.copied.set(true);
      setTimeout(() => this.copied.set(false), 2000);
    }
  }
}
```

---

## API Integration

```typescript
// audit-pack.service.ts
import { Injectable, inject } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';
import { AuditPackExportRequest, AuditPackExportResponse } from '../models/audit-pack.models';

@Injectable({ providedIn: 'root' })
export class AuditPackService {
  private readonly http = inject(HttpClient);
  private readonly baseUrl = '/api/v1/export';

  exportAuditPack(request: AuditPackExportRequest): Observable<AuditPackExportResponse> {
    return this.http.post<AuditPackExportResponse>(`${this.baseUrl}/audit-pack`, request);
  }

  getExportStatus(bundleId: string): Observable<{ state: string; percent: number }> {
    return this.http.get<{ state: string; percent: number }>(
      `${this.baseUrl}/audit-pack/${bundleId}/status`
    );
  }
}
```
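
Per DR-001 the first iteration polls rather than streams. One way the dialog could drive its progress state from `getExportStatus`, sketched under the assumption that the backend eventually reports a terminal `complete` or `error` state:

```typescript
// Polling sketch (assumes the status endpoint reaches 'complete' or 'error').
import { Subscription, interval, switchMap, takeWhile, tap } from 'rxjs';

export function pollExportProgress(
  service: AuditPackService,
  bundleId: string,
  onUpdate: (state: string, percent: number) => void
): Subscription {
  return interval(1000).pipe(                             // poll once per second
    switchMap(() => service.getExportStatus(bundleId)),   // latest status wins
    tap(status => onUpdate(status.state, status.percent)),
    // keep polling until a terminal state; `true` also emits that final value
    takeWhile(status => status.state !== 'complete' && status.state !== 'error', true)
  ).subscribe();
}
```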

---

## Success Criteria

- [ ] Dialog displays artifact summary
- [ ] Content options checkboxes work correctly
- [ ] Format selector offers ZIP, NDJSON, tar.gz
- [ ] Signing options toggle between keyless/keyed
- [ ] Progress bar shows export state
- [ ] Merkle root displays after export completes
- [ ] Copy hash button copies full hash to clipboard
- [ ] Download button triggers file download
- [ ] Export summary shows content counts
- [ ] Rekor link opens transparency log entry
- [ ] Error state displays meaningful message
- [ ] Dark mode styling works correctly
- [ ] Unit tests achieve ≥80% coverage

---

## Decisions & Risks

| ID | Decision/Risk | Status | Resolution |
|----|---------------|--------|------------|
| DR-001 | Progress tracking: polling vs SSE | PENDING | Start with polling, upgrade to SSE later |
| DR-002 | Large export handling | PENDING | Add size warning for >10MB bundles |
| DR-003 | Download method: direct URL vs blob | RESOLVED | Direct signed URL from backend |

---

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Detailed implementation spec |

150
docs/implplan/SPRINT_20251229_004_001_LIB_fixture_harvester.md
Normal file
@@ -0,0 +1,150 @@
# SPRINT_20251229_004_001_LIB_fixture_harvester

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | LIB (Library/Tool) |
| **Topic** | Fixture Harvester Tool for Test Infrastructure |
| **Working Directory** | `src/__Tests/Tools/FixtureHarvester/` |
| **Status** | TODO |

## Context

The advisory proposes a `FixtureHarvester` tool to acquire, curate, and pin test fixtures with cryptographic hashes. This supports the determinism and replay guarantees central to Stella Ops.

Existing infrastructure:
- `src/__Tests/__Benchmarks/` - has a golden corpus but no manifest system
- `src/__Tests/__Datasets/` - ground truth without formal pinning
- `StellaOps.Testing.Determinism/` - verification utilities

## Related Documentation

- `src/__Tests/AGENTS.md`
- `docs/modules/replay/architecture.md`
- `docs/dev/fixtures.md` (if it exists)

## Prerequisites

- [ ] Review the existing fixture directory structure
- [ ] Understand `DeterminismVerifier` patterns
- [ ] Read the replay manifest schema

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| FH-001 | Create `fixtures.manifest.yml` schema | TODO | | Root manifest listing all fixture sets |
| FH-002 | Create `meta.json` schema per fixture | TODO | | Source, retrieved_at, license, sha256, refresh_policy |
| FH-003 | Implement `FixtureHarvester` CLI tool | TODO | | Fetch → hash → store → meta |
| FH-004 | Add image digest pinning for OCI fixtures | TODO | | Pull by tag → record digest |
| FH-005 | Add feed snapshot capture for Concelier fixtures | TODO | | Curate NVD/GHSA/OSV samples |
| FH-006 | Add VEX document fixture sourcing | TODO | | OpenVEX/CSAF examples |
| FH-007 | Add SBOM golden fixture generator | TODO | | Build minimal images, capture SBOMs |
| FH-008 | Implement `FixtureValidationTests` | TODO | | Verify meta.json, hashes match |
| FH-009 | Implement `GoldenRegen` command (manual) | TODO | | Regenerate expected outputs |
| FH-010 | Document fixture tiers (T0-T3) | TODO | | Synthetic, spec examples, real samples, regressions |

## Fixture Manifest Schema

```yaml
# fixtures.manifest.yml
schemaVersion: "1.0"
fixtures:
  sbom:
    - id: sbom-det-01
      description: "Deterministic SBOM - minimal 5-package image"
      source: "local-build"
      imageDigest: "sha256:abc123..."
      expectedSbomHash: "sha256:def456..."
      refreshPolicy: "manual"

  feeds:
    - id: feed-osv-sample
      description: "30 OSV advisories across ecosystems"
      source: "https://api.osv.dev"
      count: 30
      capturedAt: "2025-12-29T00:00:00Z"
      sha256: "sha256:..."

  vex:
    - id: vex-openvex-examples
      description: "OpenVEX specification examples"
      source: "https://github.com/openvex/examples"
      sha256: "sha256:..."
```

## Meta.json Schema

```json
{
  "id": "sbom-det-01",
  "source": "local-build",
  "sourceUrl": null,
  "retrievedAt": "2025-12-29T10:00:00Z",
  "license": "CC0-1.0",
  "sha256": "sha256:abc123...",
  "refreshPolicy": "manual",
  "notes": "Minimal Alpine image with 5 OS packages for determinism testing"
}
```

## Directory Structure

```
src/__Tests/
├── fixtures/
│   ├── fixtures.manifest.yml
│   ├── sbom/
│   │   └── sbom-det-01/
│   │       ├── meta.json
│   │       ├── raw/
│   │       │   └── image.tar.gz
│   │       ├── normalized/
│   │       │   └── sbom.cdx.json
│   │       └── expected/
│   │           └── sbom.cdx.json.sha256
│   ├── feeds/
│   │   └── feed-osv-sample/
│   │       ├── meta.json
│   │       ├── raw/
│   │       └── expected/
│   └── vex/
│       └── vex-openvex-examples/
│           ├── meta.json
│           └── raw/
└── Tools/
    └── FixtureHarvester/
        ├── FixtureHarvester.csproj
        ├── Program.cs
        ├── Commands/
        │   ├── HarvestCommand.cs
        │   ├── ValidateCommand.cs
        │   └── RegenCommand.cs
        └── Models/
            ├── FixtureManifest.cs
            └── FixtureMeta.cs
```

## Success Criteria

- [ ] `fixtures.manifest.yml` lists all fixture sets
- [ ] Each fixture has a `meta.json` with provenance
- [ ] `dotnet run --project FixtureHarvester validate` passes
- [ ] SHA256 hashes are stable across runs
- [ ] CI can detect fixture drift via hash mismatch

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Store large binaries in Git LFS? | PENDING |
| DR-002 | Include real distro advisories (license)? | PENDING - prefer synthetic/spec examples |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | From advisory analysis |

@@ -0,0 +1,273 @@
# SPRINT_20251229_004_002_BE_backport_status_service

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | BE (Backend) |
| **Topic** | Backport Status Retrieval Service |
| **Working Directory** | `src/Concelier/__Libraries/`, `src/Scanner/` |
| **Status** | TODO |

## Context

The advisory proposes a deterministic algorithm for answering: "For a given (distro, release, package, version) and CVE, is it patched or vulnerable?"

Existing infrastructure:
- Feedser has a 4-tier evidence model (Tier 1-4 confidence)
- Concelier has version range normalization (EVR, dpkg, apk, semver)
- Scanner has `BinaryLookupStageExecutor` for binary-level vulnerability evidence

Gap: no unified `BackportStatusService` composes these into a single deterministic verdict.

## Related Documentation

- `docs/modules/feedser/architecture.md` (evidence tiers)
- `docs/modules/concelier/architecture.md` (version normalization)
- `docs/modules/scanner/architecture.md` (Binary Vulnerability Lookup)

## Prerequisites

- [ ] Read the Feedser 4-tier evidence model
- [ ] Understand the Concelier version comparators
- [ ] Review the Scanner `BinaryLookupStageExecutor`

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| BP-001 | Define Fix Rule types (Boundary, Range, BuildDigest, Status) | TODO | | Core domain model |
| BP-002 | Create `IFixRuleRepository` interface | TODO | | Query rules by (distro, pkg, CVE) |
| BP-003 | Implement Debian security-tracker extractor | TODO | | Parse tracker JSON → BoundaryRules |
| BP-004 | Implement Alpine secdb extractor | TODO | | Parse secfixes → BoundaryRules |
| BP-005 | Implement RHEL/SUSE OVAL extractor | TODO | | Parse OVAL → Range/BoundaryRules |
| BP-006 | Create `FixIndex` snapshot service | TODO | | Indexed by (distro, release, pkg) |
| BP-007 | Implement `BackportStatusService.EvalPatchedStatus()` | TODO | | Core algorithm |
| BP-008 | Wire binary digest matching from Scanner | TODO | | BuildDigestRule integration |
| BP-009 | Add confidence scoring (high/medium/low) | TODO | | Per-tier confidence |
| BP-010 | Add determinism tests for verdict stability | TODO | | Same input → same verdict |
| BP-011 | Add evidence chain for audit | TODO | | Rule IDs + source pointers |

## Fix Rule Domain Model

```csharp
// Location: src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Models/

/// <summary>
/// Product context key for rule matching.
/// </summary>
public sealed record ProductContext(
    string Distro,          // e.g., "debian", "alpine", "rhel"
    string Release,         // e.g., "bookworm", "3.19", "9"
    string? RepoScope,      // e.g., "main", "security"
    string? Architecture);

/// <summary>
/// Package identity for rule matching.
/// </summary>
public sealed record PackageKey(
    PackageEcosystem Ecosystem,   // rpm, deb, apk
    string PackageName,
    string? SourcePackageName);

/// <summary>
/// Base class for fix rules.
/// </summary>
public abstract record FixRule
{
    public required string RuleId { get; init; }
    public required string Cve { get; init; }
    public required ProductContext Context { get; init; }
    public required PackageKey Package { get; init; }
    public required RulePriority Priority { get; init; }
    public required decimal Confidence { get; init; }
    public required EvidencePointer Evidence { get; init; }
}

/// <summary>
/// CVE is fixed at a specific version boundary.
/// </summary>
public sealed record BoundaryRule : FixRule
{
    public required string FixedVersion { get; init; }
    public required IVersionComparator Comparator { get; init; }
}

/// <summary>
/// CVE affects a version range.
/// </summary>
public sealed record RangeRule : FixRule
{
    public required VersionRange AffectedRange { get; init; }
}

/// <summary>
/// CVE status determined by exact binary build.
/// </summary>
public sealed record BuildDigestRule : FixRule
{
    public required string BuildDigest { get; init; }   // sha256 of binary
    public required string? BuildId { get; init; }      // ELF build-id
    public required FixStatus Status { get; init; }
}

/// <summary>
/// Explicit status without version boundary.
/// </summary>
public sealed record StatusRule : FixRule
{
    public required FixStatus Status { get; init; }
}
|
||||
|
||||
public enum FixStatus
|
||||
{
|
||||
Patched,
|
||||
Vulnerable,
|
||||
NotAffected,
|
||||
WontFix,
|
||||
UnderInvestigation,
|
||||
Unknown
|
||||
}
|
||||
|
||||
public enum RulePriority
|
||||
{
|
||||
DistroNative = 100, // Highest
|
||||
VendorCsaf = 90,
|
||||
ThirdParty = 50 // Lowest
|
||||
}
|
||||
```
|
||||
|
||||
## Backport Status Service
|
||||
|
||||
```csharp
|
||||
// Location: src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Services/
|
||||
|
||||
public interface IBackportStatusService
|
||||
{
|
||||
/// <summary>
|
||||
/// Evaluate patched status for a package installation.
|
||||
/// </summary>
|
||||
ValueTask<BackportVerdict> EvalPatchedStatusAsync(
|
||||
ProductContext context,
|
||||
InstalledPackage package,
|
||||
string cve,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public sealed record InstalledPackage(
|
||||
PackageKey Key,
|
||||
string InstalledVersion,
|
||||
string? BuildDigest,
|
||||
string? SourcePackage);
|
||||
|
||||
public sealed record BackportVerdict(
|
||||
string Cve,
|
||||
FixStatus Status,
|
||||
VerdictConfidence Confidence,
|
||||
IReadOnlyList<string> AppliedRuleIds,
|
||||
IReadOnlyList<EvidencePointer> Evidence,
|
||||
bool HasConflict,
|
||||
string? ConflictReason);
|
||||
|
||||
public enum VerdictConfidence
|
||||
{
|
||||
High, // Explicit advisory/boundary
|
||||
Medium, // Inferred from range or fingerprint
|
||||
Low // Heuristic or fallback
|
||||
}
|
||||
```
|
||||
|
||||
## Evaluation Algorithm (Pseudocode)
|
||||
|
||||
```
|
||||
EvalPatchedStatus(context, pkg, cve):
|
||||
rules = FixIndex.GetRules(context, pkg.Key) ∪ FixIndex.GetRules(context, pkg.SourcePackage)
|
||||
|
||||
// 1. Not-affected wins immediately
|
||||
if any StatusRule(NotAffected) at highest priority:
|
||||
return NotAffected(High)
|
||||
|
||||
// 2. Exact build digest wins
|
||||
if any BuildDigestRule matches pkg.BuildDigest:
|
||||
return rule.Status(High)
|
||||
|
||||
// 3. Evaluate boundary rules
|
||||
boundaries = rules.OfType<BoundaryRule>().OrderByDescending(Priority)
|
||||
if boundaries.Any():
|
||||
topPriority = boundaries.Max(Priority)
|
||||
topRules = boundaries.Where(Priority == topPriority)
|
||||
|
||||
hasConflict = topRules.DistinctBy(FixedVersion).Count() > 1
|
||||
fixedVersion = hasConflict
|
||||
? topRules.Max(FixedVersion, pkg.Comparator) // Conservative
|
||||
: topRules.Min(FixedVersion, pkg.Comparator) // Precise
|
||||
|
||||
if pkg.Comparator.Compare(pkg.InstalledVersion, fixedVersion) >= 0:
|
||||
return Patched(hasConflict ? Medium : High)
|
||||
else:
|
||||
return Vulnerable(High)
|
||||
|
||||
// 4. Evaluate range rules
|
||||
ranges = rules.OfType<RangeRule>()
|
||||
if ranges.Any():
|
||||
inRange = ranges.Any(r => r.AffectedRange.Contains(pkg.InstalledVersion))
|
||||
return inRange ? Vulnerable(Medium) : Unknown(Low)
|
||||
|
||||
// 5. Fallback
|
||||
return Unknown(Low)
|
||||
```
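
To make steps 3-5 concrete, a C# sketch follows. It assumes the domain model above, that `IVersionComparator` implements `IComparer<string>`, and a hypothetical `Verdict(...)` helper that fills `AppliedRuleIds` and `Evidence` from the rules it is given:

```csharp
// Sketch of the boundary/range portion of EvalPatchedStatus; Verdict(...) is
// a placeholder helper, not part of the contract above.
private BackportVerdict EvaluateVersionRules(
    IReadOnlyList<FixRule> rules, InstalledPackage pkg, string cve)
{
    var boundaries = rules.OfType<BoundaryRule>().ToList();
    if (boundaries.Count > 0)
    {
        var topPriority = boundaries.Max(r => r.Priority);
        var topRules = boundaries.Where(r => r.Priority == topPriority).ToList();
        var comparator = topRules[0].Comparator;

        var hasConflict = topRules.DistinctBy(r => r.FixedVersion).Count() > 1;
        var ordered = topRules.Select(r => r.FixedVersion)
                              .OrderBy(v => v, comparator)   // comparator as IComparer<string>
                              .ToList();
        // Conservative under conflict: demand the highest claimed fix version.
        var fixedVersion = hasConflict ? ordered[^1] : ordered[0];

        return comparator.Compare(pkg.InstalledVersion, fixedVersion) >= 0
            ? Verdict(cve, FixStatus.Patched,
                      hasConflict ? VerdictConfidence.Medium : VerdictConfidence.High,
                      topRules, hasConflict)
            : Verdict(cve, FixStatus.Vulnerable, VerdictConfidence.High, topRules, hasConflict);
    }

    var ranges = rules.OfType<RangeRule>().ToList();
    if (ranges.Count > 0)
    {
        var affected = ranges.Any(r => r.AffectedRange.Contains(pkg.InstalledVersion));
        return affected
            ? Verdict(cve, FixStatus.Vulnerable, VerdictConfidence.Medium, ranges, false)
            : Verdict(cve, FixStatus.Unknown, VerdictConfidence.Low, ranges, false);
    }

    return Verdict(cve, FixStatus.Unknown, VerdictConfidence.Low, Array.Empty<FixRule>(), false);
}
```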

## Distro-Specific Extractors

### Debian Security Tracker

```csharp
// Parses https://security-tracker.debian.org/tracker/data/json
public class DebianTrackerExtractor : IFixRuleExtractor
{
    public IAsyncEnumerable<FixRule> ExtractAsync(Stream trackerJson, CancellationToken ct)
    {
        // Parse JSON, extract fixed versions per release/package.
        // Emit a BoundaryRule for each (CVE, package, release, fixed_version).
    }
}
```

### Alpine secdb

```csharp
// Parses https://secdb.alpinelinux.org/
public class AlpineSecdbExtractor : IFixRuleExtractor
{
    public IAsyncEnumerable<FixRule> ExtractAsync(Stream secdbYaml, CancellationToken ct)
    {
        // Parse secfixes entries.
        // The first version in the secfixes list for a CVE is the fix version.
    }
}
```
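
The mapping step BP-004 needs is small enough to sketch. The dictionary shape (version → CVE list) and the `PackageEcosystem.Apk`/`EvidencePointer.ForSecdb` names below are assumptions:

```csharp
// Sketch: turn a parsed secfixes mapping into BoundaryRules. Per the note
// above, the lowest version listing a CVE is treated as its fix boundary.
private IEnumerable<FixRule> ToBoundaryRules(
    string packageName,
    string release,
    IReadOnlyDictionary<string, IReadOnlyList<string>> secfixes,
    IVersionComparator apkComparator)
{
    var fixVersionByCve = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    foreach (var (version, cves) in secfixes)
    {
        foreach (var cve in cves)
        {
            // Keep the lowest version that mentions the CVE.
            if (!fixVersionByCve.TryGetValue(cve, out var existing) ||
                apkComparator.Compare(version, existing) < 0)
            {
                fixVersionByCve[cve] = version;
            }
        }
    }

    foreach (var (cve, fixedVersion) in fixVersionByCve)
    {
        yield return new BoundaryRule
        {
            RuleId = $"alpine:{release}:{packageName}:{cve}",
            Cve = cve,
            Context = new ProductContext("alpine", release, RepoScope: null, Architecture: null),
            Package = new PackageKey(PackageEcosystem.Apk, packageName, SourcePackageName: null),
            Priority = RulePriority.DistroNative,
            Confidence = 0.95m,
            Evidence = EvidencePointer.ForSecdb(release, packageName), // hypothetical helper
            FixedVersion = fixedVersion,
            Comparator = apkComparator
        };
    }
}
```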

## Success Criteria

- [ ] Fix rule types defined and serializable
- [ ] At least 2 distro extractors implemented (Debian, Alpine)
- [ ] `EvalPatchedStatus` returns deterministic verdicts
- [ ] Confidence scores accurate per evidence tier
- [ ] Evidence chain traceable to source documents
- [ ] Unit tests with known backport cases

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Store FixIndex in PostgreSQL vs in-memory? | PENDING - recommend hybrid |
| DR-002 | How to handle distros without structured data? | PENDING - mark as Unknown |
| DR-003 | Refresh frequency for distro feeds? | PENDING - tie to Concelier schedules |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | From advisory analysis |

docs/implplan/SPRINT_20251229_004_003_BE_vexlens_truth_tables.md (new file)
@@ -0,0 +1,288 @@

# SPRINT_20251229_004_003_BE_vexlens_truth_tables

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | BE (Backend) |
| **Topic** | VexLens Lattice Merge Truth Table Tests |
| **Working Directory** | `src/VexLens/__Tests/` |
| **Status** | TODO |

## Context

VexLens has a defined lattice for VEX status merging:

```
unknown < under_investigation < not_affected | affected < fixed
```

The advisory proposes systematic truth table tests to verify:
1. Deterministic merge outcomes
2. Conflict detection accuracy
3. Same inputs → same verdict

Existing infrastructure:
- `VexConsensusEngine` implements lattice join
- `OpenVexNormalizer` and `CsafVexNormalizer` exist
- Conflict tracking with `conflicts` array

Gap: No systematic truth table test coverage.

## Related Documentation

- `docs/modules/vex-lens/architecture.md`
- `src/VexLens/StellaOps.VexLens/Consensus/VexConsensusEngine.cs`

## Prerequisites

- [ ] Read VexLens lattice states from architecture doc
- [ ] Understand consensus computation flow
- [ ] Review existing VexLens tests

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| VTT-001 | Define truth table matrix (status × justification × scope) | TODO | | Exhaustive combinations |
| VTT-002 | Create synthetic VEX fixtures for each cell | TODO | | OpenVEX format |
| VTT-003 | Implement `VexLensTruthTableTests` class | TODO | | Theory-based tests |
| VTT-004 | Add conflict detection tests | TODO | | Vendor A vs Vendor B |
| VTT-005 | Add trust tier ordering tests | TODO | | Precedence verification |
| VTT-006 | Add determinism verification | TODO | | 10 iterations same result |
| VTT-007 | Add golden output snapshots | TODO | | Expected consensus JSON |
| VTT-008 | Add recorded replay tests (10 seed cases) | TODO | | Inputs → verdict stability |
| VTT-009 | Document edge cases in test comments | TODO | | For future maintainers |

## VEX Status Lattice

```
               ┌─────────┐
               │  fixed  │  (terminal)
               └────▲────┘
                    │
          ┌─────────┴─────────┐
          │                   │
  ┌───────┴──────┐    ┌───────┴──────┐
  │ not_affected │    │   affected   │  (incomparable: a same-tier tie is a conflict)
  └───────▲──────┘    └───────▲──────┘
          │                   │
          └─────────┬─────────┘
                    │
       ┌────────────┴────────────┐
       │   under_investigation   │
       └────────────▲────────────┘
                    │
       ┌────────────┴────────────┐
       │         unknown         │  (bottom)
       └─────────────────────────┘
```
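
A minimal sketch of the join this lattice implies — not the actual `VexConsensusEngine` code, only the ordering the truth tables below assume, with a hypothetical `MergeResult` shape:

```csharp
// Rank encodes unknown < under_investigation < {not_affected, affected} < fixed;
// not_affected and affected share a rank and are incomparable, so meeting both
// is recorded as a conflict rather than resolved silently.
public readonly record struct MergeResult(VexStatus Status, bool Conflict);

public static class VexLattice
{
    private static int Rank(VexStatus s) => s switch
    {
        VexStatus.Unknown => 0,
        VexStatus.UnderInvestigation => 1,
        VexStatus.NotAffected => 2,
        VexStatus.Affected => 2,   // same rank as NotAffected: incomparable
        VexStatus.Fixed => 3,
        _ => 0
    };

    public static MergeResult Join(VexStatus a, VexStatus b)
    {
        if (a == b) return new MergeResult(a, Conflict: false);

        var (ra, rb) = (Rank(a), Rank(b));
        if (ra == rb)
        {
            // affected vs not_affected at the same trust tier: keep the more
            // cautious status but flag the disagreement (matches TT-013).
            return new MergeResult(VexStatus.Affected, Conflict: true);
        }
        return new MergeResult(ra > rb ? a : b, Conflict: false);
    }
}
```

Note that `Join` returns the cautious `affected` on a same-rank tie, which is exactly what TT-013 below expects.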

## Truth Table Matrix

### Single Issuer Tests

| Test ID | Input Status | Expected Output | Notes |
|---------|--------------|-----------------|-------|
| TT-001 | unknown | unknown | Identity |
| TT-002 | under_investigation | under_investigation | Identity |
| TT-003 | affected | affected | Identity |
| TT-004 | not_affected | not_affected | Identity |
| TT-005 | fixed | fixed | Identity |

### Two Issuer Merge Tests (Same Trust Tier)

| Test ID | Issuer A | Issuer B | Expected | Conflict? |
|---------|----------|----------|----------|-----------|
| TT-010 | unknown | unknown | unknown | No |
| TT-011 | unknown | affected | affected | No |
| TT-012 | unknown | not_affected | not_affected | No |
| TT-013 | affected | not_affected | CONFLICT | Yes - must record |
| TT-014 | affected | fixed | fixed | No |
| TT-015 | not_affected | fixed | fixed | No |
| TT-016 | under_investigation | affected | affected | No |
| TT-017 | under_investigation | not_affected | not_affected | No |
| TT-018 | affected | affected | affected | No |
| TT-019 | not_affected | not_affected | not_affected | No |

### Trust Tier Precedence Tests

| Test ID | High Tier Status | Low Tier Status | Expected | Notes |
|---------|------------------|-----------------|----------|-------|
| TT-020 | affected | not_affected | affected | High tier wins |
| TT-021 | not_affected | affected | not_affected | High tier wins |
| TT-022 | unknown | affected | affected | Low tier provides info |

### Justification Impact Tests

| Test ID | Status | Justification | Expected Confidence |
|---------|--------|---------------|---------------------|
| TT-030 | not_affected | component_not_present | 0.95+ |
| TT-031 | not_affected | vulnerable_code_not_in_execute_path | 0.90+ |
| TT-032 | not_affected | inline_mitigations_already_exist | 0.85+ |
| TT-033 | affected | no justification | 0.80+ |

## Test Implementation

```csharp
// Location: src/VexLens/__Tests/StellaOps.VexLens.Tests/Consensus/VexLensTruthTableTests.cs

[Trait("Category", TestCategories.Determinism)]
[Trait("Category", TestCategories.Golden)]
public class VexLensTruthTableTests
{
    private readonly VexConsensusEngine _engine;

    public VexLensTruthTableTests()
    {
        _engine = new VexConsensusEngine(
            NullLogger<VexConsensusEngine>.Instance,
            new InMemoryIssuerRegistry());
    }

    public static IEnumerable<object[]> SingleIssuerCases => new[]
    {
        new object[] { "TT-001", VexStatus.Unknown, VexStatus.Unknown },
        new object[] { "TT-002", VexStatus.UnderInvestigation, VexStatus.UnderInvestigation },
        new object[] { "TT-003", VexStatus.Affected, VexStatus.Affected },
        new object[] { "TT-004", VexStatus.NotAffected, VexStatus.NotAffected },
        new object[] { "TT-005", VexStatus.Fixed, VexStatus.Fixed },
    };

    [Theory]
    [MemberData(nameof(SingleIssuerCases))]
    public async Task SingleIssuer_ReturnsIdentity(string testId, VexStatus input, VexStatus expected)
    {
        // Arrange
        var statement = CreateStatement("issuer-a", input);

        // Act
        var result = await _engine.ComputeConsensusAsync(
            "CVE-2024-1234",
            "pkg:npm/lodash@4.17.21",
            new[] { statement },
            CancellationToken.None);

        // Assert
        result.Status.Should().Be(expected, because: $"{testId}: single issuer should return identity");
        result.Conflicts.Should().BeEmpty();
    }

    public static IEnumerable<object[]> TwoIssuerMergeCases => new[]
    {
        new object[] { "TT-010", VexStatus.Unknown, VexStatus.Unknown, VexStatus.Unknown, false },
        new object[] { "TT-011", VexStatus.Unknown, VexStatus.Affected, VexStatus.Affected, false },
        new object[] { "TT-013", VexStatus.Affected, VexStatus.NotAffected, VexStatus.Affected, true }, // CONFLICT
        new object[] { "TT-014", VexStatus.Affected, VexStatus.Fixed, VexStatus.Fixed, false },
    };

    [Theory]
    [MemberData(nameof(TwoIssuerMergeCases))]
    public async Task TwoIssuers_SameTier_MergesCorrectly(
        string testId,
        VexStatus statusA,
        VexStatus statusB,
        VexStatus expected,
        bool expectConflict)
    {
        // Arrange
        var statementA = CreateStatement("issuer-a", statusA, TrustTier.Vendor);
        var statementB = CreateStatement("issuer-b", statusB, TrustTier.Vendor);

        // Act
        var result = await _engine.ComputeConsensusAsync(
            "CVE-2024-1234",
            "pkg:npm/lodash@4.17.21",
            new[] { statementA, statementB },
            CancellationToken.None);

        // Assert
        result.Status.Should().Be(expected, because: $"{testId}");
        result.Conflicts.Any().Should().Be(expectConflict, because: $"{testId}: conflict detection");
    }

    [Fact]
    public async Task SameInputs_ProducesIdenticalOutput_Across10Iterations()
    {
        // Arrange - e.g., the TT-013-style affected/not_affected pair (helper omitted here)
        var statements = CreateConflictingStatements();
        var results = new List<string>();

        // Act
        for (int i = 0; i < 10; i++)
        {
            var result = await _engine.ComputeConsensusAsync(
                "CVE-2024-1234",
                "pkg:npm/lodash@4.17.21",
                statements,
                CancellationToken.None);

            results.Add(JsonSerializer.Serialize(result, CanonicalJsonOptions.Default));
        }

        // Assert
        results.Distinct().Should().HaveCount(1, "determinism: all iterations should produce identical JSON");
    }

    private static NormalizedVexStatement CreateStatement(
        string issuerId,
        VexStatus status,
        TrustTier tier = TrustTier.Vendor)
    {
        return new NormalizedVexStatement
        {
            IssuerId = issuerId,
            Status = status,
            TrustTier = tier,
            Timestamp = DateTimeOffset.Parse("2025-01-01T00:00:00Z"),
            VulnerabilityId = "CVE-2024-1234",
            ProductKey = "pkg:npm/lodash@4.17.21"
        };
    }
}
```

## Synthetic Fixture Structure

```
src/VexLens/__Tests/fixtures/truth-tables/
├── single-issuer/
│   ├── tt-001-unknown.openvex.json
│   ├── tt-002-under-investigation.openvex.json
│   └── ...
├── two-issuer-merge/
│   ├── tt-010-unknown-unknown.openvex.json
│   ├── tt-013-conflict-affected-not-affected/
│   │   ├── issuer-a.openvex.json
│   │   └── issuer-b.openvex.json
│   └── ...
├── trust-tier-precedence/
│   └── ...
└── expected/
    ├── tt-001.consensus.json
    ├── tt-013.consensus.json   # includes conflict array
    └── ...
```
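
For VTT-002, one fixture cell might look like the snippet below — a hand-written sketch of issuer-a for TT-013 with placeholder ids and timestamps, not a file that exists yet:

```json
{
  "@context": "https://openvex.dev/ns/v0.2.0",
  "@id": "https://stellaops.example/vex/tt-013-issuer-a",
  "author": "issuer-a",
  "timestamp": "2025-01-01T00:00:00Z",
  "version": 1,
  "statements": [
    {
      "vulnerability": { "name": "CVE-2024-1234" },
      "products": [ { "@id": "pkg:npm/lodash@4.17.21" } ],
      "status": "affected",
      "action_statement": "update to a fixed version"
    }
  ]
}
```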

## Success Criteria

- [ ] All truth table cells have corresponding tests
- [ ] Conflict detection 100% accurate
- [ ] Trust tier precedence correctly applied
- [ ] Determinism verified (10 iterations)
- [ ] Golden outputs match expected consensus
- [ ] Tests run in <5 seconds total

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | How to handle 3+ way conflicts? | PENDING - record all disagreeing issuers |
| DR-002 | Justification impacts confidence only, not status? | CONFIRMED per architecture |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | From advisory analysis |

docs/implplan/SPRINT_20251229_004_004_BE_scheduler_resilience.md (new file)
@@ -0,0 +1,298 @@

# SPRINT_20251229_004_004_BE_scheduler_resilience

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | BE (Backend) |
| **Topic** | Scheduler Resilience and Chaos Tests |
| **Working Directory** | `src/Scheduler/__Tests/` |
| **Status** | TODO |

## Context

The advisory proposes testing:
1. Idempotent job keys - prevent duplicate execution
2. Retry jitter - bounded backoff verification
3. Crash mid-run - exactly-once semantics
4. Backpressure - queue depth handling

Existing infrastructure:
- `GraphJobStateMachine` for state transitions
- Distributed locks via PostgreSQL
- Queue abstraction with retry configuration

Gap: Chaos and load tests not implemented.

## Related Documentation

- `docs/modules/scheduler/architecture.md`
- `src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/`
- `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/`

## Prerequisites

- [ ] Read Scheduler architecture doc
- [ ] Understand GraphJobStateMachine
- [ ] Review distributed lock implementation

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| SCH-001 | Implement idempotent job key tests | TODO | | Same key → one execution |
| SCH-002 | Implement retry jitter verification tests | TODO | | Backoff within bounds |
| SCH-003 | Implement crash recovery chaos test | TODO | | Kill worker mid-run |
| SCH-004 | Implement backpressure load test | TODO | | 1k concurrent jobs |
| SCH-005 | Add distributed lock contention tests | TODO | | Multi-worker scenarios |
| SCH-006 | Add state machine transition tests | TODO | | Valid/invalid transitions |
| SCH-007 | Add heartbeat timeout tests | TODO | | Stale lock cleanup |
| SCH-008 | Add queue depth metrics verification | TODO | | Backpressure signals |

## Test Implementations

### SCH-001: Idempotent Job Keys

```csharp
[Trait("Category", TestCategories.Integration)]
public class SchedulerIdempotencyTests : IClassFixture<SchedulerPostgresFixture>
{
    [Fact]
    public async Task SameJobKey_ExecutesOnlyOnce()
    {
        // Arrange
        var jobKey = $"scan:{Guid.NewGuid()}";
        var executionCount = 0;

        var worker = CreateWorker(job =>
        {
            Interlocked.Increment(ref executionCount);
            return Task.CompletedTask;
        });

        // Act - submit same job twice
        await _scheduler.EnqueueAsync(new ScanJob(jobKey, "image:latest"));
        await _scheduler.EnqueueAsync(new ScanJob(jobKey, "image:latest")); // duplicate

        await worker.ProcessAllAsync(timeout: TimeSpan.FromSeconds(5));

        // Assert
        executionCount.Should().Be(1, "idempotent key should prevent duplicate execution");
    }
}
```

### SCH-002: Retry Jitter Verification

```csharp
[Trait("Category", TestCategories.Unit)]
public class RetryJitterTests
{
    [Theory]
    [InlineData(1, 5_000, 10_000)]     // Attempt 1: 5-10s
    [InlineData(2, 10_000, 20_000)]    // Attempt 2: 10-20s
    [InlineData(3, 20_000, 40_000)]    // Attempt 3: 20-40s
    [InlineData(5, 60_000, 120_000)]   // Attempt 5: 60-120s (capped)
    public void RetryDelay_IsWithinExpectedBounds(int attempt, int minMs, int maxMs)
    {
        // Arrange
        var policy = new ExponentialBackoffPolicy(
            initialDelay: TimeSpan.FromSeconds(5),
            maxDelay: TimeSpan.FromMinutes(2),
            jitterFactor: 0.5);

        // Act
        var delays = Enumerable.Range(0, 100)
            .Select(_ => policy.GetDelay(attempt))
            .ToList();

        // Assert
        delays.Should().OnlyContain(d =>
            d.TotalMilliseconds >= minMs && d.TotalMilliseconds <= maxMs,
            $"attempt {attempt} delays should be within [{minMs}, {maxMs}]ms");
    }
}
```

### SCH-003: Crash Recovery Chaos Test

```csharp
[Trait("Category", TestCategories.Chaos)]
public class SchedulerCrashRecoveryTests : IClassFixture<SchedulerPostgresFixture>
{
    [Fact]
    public async Task WorkerKilledMidRun_JobRecoveredByAnotherWorker()
    {
        // Arrange
        var jobCompleted = new TaskCompletionSource<bool>();
        var firstWorkerStarted = new TaskCompletionSource<bool>();

        // Worker 1: will be killed mid-execution
        var worker1 = CreateWorker(async job =>
        {
            firstWorkerStarted.SetResult(true);
            await Task.Delay(TimeSpan.FromMinutes(5)); // Long-running
        });

        // Worker 2: will recover the job
        var worker2 = CreateWorker(async job =>
        {
            jobCompleted.SetResult(true);
            await Task.CompletedTask;
        });

        // Act
        var jobId = await _scheduler.EnqueueAsync(new ScanJob("crash-test", "image:latest"));

        // Wait for worker1 to start processing
        _ = worker1.StartAsync(CancellationToken.None);
        await firstWorkerStarted.Task;

        // Kill worker1 (simulate crash)
        await worker1.DisposeAsync();

        // Start worker2 (should claim the orphaned job after heartbeat timeout)
        await Task.Delay(_options.HeartbeatTimeout + TimeSpan.FromSeconds(1));
        _ = worker2.StartAsync(CancellationToken.None);

        // Assert
        var completed = await Task.WhenAny(
            jobCompleted.Task,
            Task.Delay(TimeSpan.FromSeconds(30)));

        completed.Should().Be(jobCompleted.Task, "job should be recovered by worker2");

        var job = await _scheduler.GetJobAsync(jobId);
        job.State.Should().Be(JobState.Completed);
        job.Attempts.Should().Be(2, "crashed attempt + successful attempt");
    }

    [Fact]
    public async Task CrashedJob_DoesNotExecuteTwice()
    {
        // Arrange
        var executionAttempts = new ConcurrentBag<string>();

        var worker = CreateWorker(async job =>
        {
            executionAttempts.Add(job.Id.ToString());

            if (executionAttempts.Count == 1)
            {
                // Simulate crash on first attempt
                throw new OperationCanceledException("Worker crashed");
            }

            await Task.CompletedTask;
        });

        // Act
        var jobId = await _scheduler.EnqueueAsync(new ScanJob("once-test", "image:latest"));
        await worker.ProcessAllAsync(timeout: TimeSpan.FromSeconds(30));

        // Assert
        var job = await _scheduler.GetJobAsync(jobId);
        job.State.Should().Be(JobState.Completed);

        // The job should appear in executionAttempts at most maxAttempts times
        executionAttempts.Count(id => id == jobId.ToString())
            .Should().BeLessOrEqualTo(_options.MaxRetries + 1);
    }
}
```

### SCH-004: Backpressure Load Test

```csharp
[Trait("Category", TestCategories.Performance)]
public class SchedulerBackpressureTests : IClassFixture<SchedulerPostgresFixture>
{
    [Fact]
    public async Task HighLoad_AppliesBackpressureCorrectly()
    {
        // Arrange
        const int jobCount = 1000;
        const int maxConcurrent = 10;
        var concurrentCount = 0;
        var maxObservedConcurrency = 0;
        var processedCount = 0;

        var worker = CreateWorkerWithConcurrencyLimit(maxConcurrent, async job =>
        {
            var current = Interlocked.Increment(ref concurrentCount);

            // Record the high-water mark with a CAS loop so concurrent
            // handlers cannot lose an update.
            int observed;
            while ((observed = Volatile.Read(ref maxObservedConcurrency)) < current &&
                   Interlocked.CompareExchange(ref maxObservedConcurrency, current, observed) != observed)
            {
            }

            await Task.Delay(10); // Simulate work

            Interlocked.Decrement(ref concurrentCount);
            Interlocked.Increment(ref processedCount);
        });

        // Act
        var enqueueTasks = Enumerable.Range(0, jobCount)
            .Select(i => _scheduler.EnqueueAsync(new ScanJob($"load-{i}", $"image:{i}")))
            .ToList();

        await Task.WhenAll(enqueueTasks);
        await worker.ProcessAllAsync(timeout: TimeSpan.FromMinutes(2));

        // Assert
        processedCount.Should().Be(jobCount, "all jobs should complete");
        maxObservedConcurrency.Should().BeLessOrEqualTo(maxConcurrent,
            "concurrency limit should be respected");
    }

    [Fact]
    public async Task QueueFull_RejectsNewJobs()
    {
        // Arrange
        var scheduler = CreateSchedulerWithQueueLimit(maxQueueDepth: 100);

        // Fill the queue
        for (int i = 0; i < 100; i++)
        {
            await scheduler.EnqueueAsync(new ScanJob($"fill-{i}", $"image:{i}"));
        }

        // Act
        var result = await scheduler.TryEnqueueAsync(new ScanJob("overflow", "image:overflow"));

        // Assert
        result.Should().BeFalse("queue at capacity should reject new jobs");
    }
}
```

## Metrics to Verify

| Metric | Expected Behavior |
|--------|-------------------|
| `scheduler.jobs.inflight` | Respects concurrency limit |
| `scheduler.jobs.queued` | Decreases as jobs complete |
| `scheduler.retries.total` | Bounded by maxRetries |
| `scheduler.heartbeat.missed` | Triggers recovery |
| `scheduler.backpressure.rejections` | Fires when queue full |
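
One way SCH-008 could observe these signals without scraping an exporter is an in-process `MeterListener`. The meter name and the assumption that the scheduler publishes these instruments on it are placeholders, not confirmed wiring:

```csharp
// Sketch: capture scheduler.jobs.inflight samples during a load test and
// expose the high-water mark for assertions. Meter name is an assumption.
using System.Diagnostics.Metrics;

public sealed class InflightJobsProbe : IDisposable
{
    private readonly MeterListener _listener = new();
    private long _maxInflight;

    public long MaxInflight => Interlocked.Read(ref _maxInflight);

    public InflightJobsProbe()
    {
        _listener.InstrumentPublished = (instrument, listener) =>
        {
            if (instrument.Meter.Name == "StellaOps.Scheduler" &&
                instrument.Name == "scheduler.jobs.inflight")
            {
                listener.EnableMeasurementEvents(instrument);
            }
        };
        _listener.SetMeasurementEventCallback<long>((inst, value, tags, state) =>
        {
            // CAS loop: keep the largest value seen, race-free.
            long seen;
            while ((seen = Interlocked.Read(ref _maxInflight)) < value &&
                   Interlocked.CompareExchange(ref _maxInflight, value, seen) != seen)
            {
            }
        });
        _listener.Start();
    }

    public void Dispose() => _listener.Dispose();
}
```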

## Success Criteria

- [ ] Idempotent keys prevent duplicate execution
- [ ] Retry jitter within configured bounds
- [ ] Crashed jobs recovered by other workers
- [ ] No duplicate execution after crash recovery
- [ ] Backpressure limits concurrency correctly
- [ ] Queue rejection works at capacity

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use Testcontainers or mock queue? | PENDING - Testcontainers for realism |
| DR-002 | Heartbeat timeout for tests? | PENDING - 5s for fast test feedback |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | From advisory analysis |

docs/implplan/SPRINT_20251229_004_005_E2E_replayable_verdict.md (new file)
@@ -0,0 +1,331 @@

# SPRINT_20251229_004_005_E2E_replayable_verdict

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 004 |
| **MODULEID** | E2E |
| **Topic** | End-to-End Replayable Verdict Tests |
| **Working Directory** | `src/__Tests/E2E/` |
| **Status** | TODO |

## Context

The advisory proposes a scripted E2E path:

```
image → Scanner → Feedser → VexLens → signed verdict (DSSE) → UI delta view
```

The run captures an artifacts bundle that enables byte-for-byte replay.

Existing infrastructure:
- `ReplayManifest` v2 schema exists
- Scanner `RecordModeService` captures replay bundles
- `PolicySimulationInputLock` for pinning
- EvidenceLocker with Merkle tree builder

Gap: No E2E test that validates the full pipeline with replay verification.

## Related Documentation

- `docs/modules/replay/architecture.md`
- `docs/replay/DETERMINISTIC_REPLAY.md`
- `docs/modules/scanner/architecture.md` (Appendix A.0 - Replay/Record mode)
- Sprint `SPRINT_20251229_001_001_BE_cgs_infrastructure`

## Prerequisites

- [ ] Read ReplayManifest v2 schema
- [ ] Understand Scanner RecordModeService
- [ ] Review EvidenceLocker bundle format

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| E2E-001 | Create golden bundle fixture | TODO | | Pinned image + feeds + policy |
| E2E-002 | Implement E2E pipeline test | TODO | | Scan → VEX → verdict |
| E2E-003 | Implement replay verification test | TODO | | Bundle → same verdict |
| E2E-004 | Implement delta verdict test | TODO | | v1 vs v2 diff |
| E2E-005 | Implement DSSE signature verification | TODO | | Test keypair |
| E2E-006 | Implement offline/air-gap replay test | TODO | | No network |
| E2E-007 | Add `stella verify --bundle` CLI command | TODO | | User-facing replay |
| E2E-008 | Add cross-platform replay test | TODO | | Ubuntu/Alpine runners |

## Golden Bundle Structure

```
tests/fixtures/e2e/bundle-0001/
├── manifest.json                # ReplayManifest v2
├── inputs/
│   ├── image.digest             # sha256:abc123...
│   ├── sbom.cdx.json            # Canonical SBOM
│   ├── feeds/
│   │   ├── osv-snapshot.json    # Pinned OSV subset
│   │   └── ghsa-snapshot.json   # Pinned GHSA subset
│   ├── vex/
│   │   └── vendor.openvex.json
│   └── policy/
│       ├── rules.yaml
│       └── score-policy.yaml
├── outputs/
│   ├── verdict.json             # Expected verdict
│   ├── verdict.dsse.json        # DSSE envelope
│   └── findings.json            # Expected findings
├── attestation/
│   ├── test-keypair.pem         # Test signing key
│   └── public-key.pem
└── meta.json                    # Bundle metadata
```

## Manifest Schema (ReplayManifest v2)

```json
{
  "schemaVersion": "2.0",
  "bundleId": "bundle-0001",
  "createdAt": "2025-12-29T00:00:00.000000Z",
  "scan": {
    "id": "e2e-test-scan-001",
    "imageDigest": "sha256:abc123...",
    "policyDigest": "sha256:policy123...",
    "scorePolicyDigest": "sha256:score123...",
    "feedSnapshotDigest": "sha256:feeds123...",
    "toolchain": "stellaops/scanner:test",
    "analyzerSetDigest": "sha256:analyzers..."
  },
  "inputs": {
    "sbom": { "path": "inputs/sbom.cdx.json", "sha256": "..." },
    "feeds": { "path": "inputs/feeds/", "sha256": "..." },
    "vex": { "path": "inputs/vex/", "sha256": "..." },
    "policy": { "path": "inputs/policy/", "sha256": "..." }
  },
  "expectedOutputs": {
    "verdict": { "path": "outputs/verdict.json", "sha256": "..." },
    "verdictHash": "sha256:verdict-content-hash..."
  }
}
```
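
Before any replay, the loader should prove the bundle inputs still match the manifest. A minimal sketch, assuming `ReplayManifest`/`ManifestEntry` records that mirror the JSON above (with `Inputs` as a name→entry dictionary) and a file-by-file directory hash in ordinal path order:

```csharp
// Sketch: verify each input's sha256 against the manifest before replaying.
using System.Security.Cryptography;

public static class BundleVerifier
{
    public static async Task VerifyInputsAsync(string bundleRoot, ReplayManifest manifest, CancellationToken ct)
    {
        foreach (var (name, entry) in manifest.Inputs)
        {
            var path = Path.Combine(bundleRoot, entry.Path);
            var actual = Directory.Exists(path)
                ? await HashDirectoryAsync(path, ct)
                : await HashFileAsync(path, ct);

            if (!string.Equals(actual, entry.Sha256, StringComparison.OrdinalIgnoreCase))
                throw new InvalidOperationException($"Input '{name}' drifted: expected {entry.Sha256}, got {actual}");
        }
    }

    private static async Task<string> HashFileAsync(string path, CancellationToken ct)
    {
        await using var stream = File.OpenRead(path);
        return Convert.ToHexString(await SHA256.HashDataAsync(stream, ct)).ToLowerInvariant();
    }

    private static async Task<string> HashDirectoryAsync(string dir, CancellationToken ct)
    {
        // Hash (relative path, file hash) pairs in stable order so the
        // directory digest is deterministic across platforms.
        using var sha = SHA256.Create();
        foreach (var file in Directory.EnumerateFiles(dir, "*", SearchOption.AllDirectories)
                                      .OrderBy(f => f, StringComparer.Ordinal))
        {
            var rel = Path.GetRelativePath(dir, file).Replace('\\', '/');
            var bytes = System.Text.Encoding.UTF8.GetBytes($"{rel}:{await HashFileAsync(file, ct)}\n");
            sha.TransformBlock(bytes, 0, bytes.Length, null, 0);
        }
        sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);
        return Convert.ToHexString(sha.Hash!).ToLowerInvariant();
    }
}
```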

## Test Implementations

### E2E-002: Full Pipeline Test

```csharp
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.E2E)]
public class ReplayableVerdictE2ETests : IClassFixture<StellaOpsE2EFixture>
{
    private readonly StellaOpsE2EFixture _fixture;

    public ReplayableVerdictE2ETests(StellaOpsE2EFixture fixture) => _fixture = fixture;

    [Fact]
    public async Task FullPipeline_ProducesConsistentVerdict()
    {
        // Arrange - load golden bundle
        var bundle = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0001");

        // Act - execute full pipeline
        var scanResult = await _fixture.Scanner.ScanAsync(
            bundle.ImageDigest,
            new ScanOptions { RecordMode = true });

        var vexConsensus = await _fixture.VexLens.ComputeConsensusAsync(
            scanResult.SbomDigest,
            bundle.FeedSnapshot);

        var verdict = await _fixture.VerdictBuilder.BuildAsync(
            new EvidencePack(
                scanResult.SbomCanonJson,
                vexConsensus.StatementsCanonJson,
                scanResult.ReachabilityGraphJson,
                bundle.FeedSnapshotDigest),
            bundle.PolicyLock,
            CancellationToken.None);

        // Assert
        verdict.CgsHash.Should().Be(bundle.ExpectedVerdictHash,
            "full pipeline should produce expected verdict hash");

        var verdictJson = JsonSerializer.Serialize(verdict.Verdict, CanonicalJsonOptions.Default);
        var expectedJson = await File.ReadAllTextAsync(bundle.ExpectedVerdictPath);
        verdictJson.Should().Be(expectedJson,
            "verdict JSON should match golden output");
    }
}
```

### E2E-003: Replay Verification Test

```csharp
[Trait("Category", TestCategories.Determinism)]
public class ReplayVerificationTests
{
    // _verdictBuilder is provided by the E2E fixture; wiring omitted in this sketch.

    [Fact]
    public async Task ReplayFromBundle_ProducesIdenticalVerdict()
    {
        // Arrange
        var bundle = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0001");
        var originalVerdictHash = bundle.ExpectedVerdictHash;

        // Act - replay the verdict
        var replayedVerdict = await _verdictBuilder.ReplayAsync(
            bundle.Manifest,
            CancellationToken.None);

        // Assert
        replayedVerdict.CgsHash.Should().Be(originalVerdictHash,
            "replayed verdict should have identical hash");
    }

    [Fact]
    public async Task ReplayOnDifferentMachine_ProducesIdenticalVerdict()
    {
        // This test runs on multiple CI runners (Ubuntu, Alpine, Debian)
        // and verifies the verdict hash is identical.

        var bundle = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0001");

        var verdict = await _verdictBuilder.BuildAsync(
            bundle.ToEvidencePack(),
            bundle.PolicyLock,
            CancellationToken.None);

        // The expected hash is committed in the bundle
        verdict.CgsHash.Should().Be(bundle.ExpectedVerdictHash,
            $"verdict on {Environment.OSVersion} should match golden hash");
    }
}
```

### E2E-004: Delta Verdict Test

```csharp
[Fact]
public async Task DeltaVerdict_ShowsExpectedChanges()
{
    // Arrange - two versions of same image
    var bundleV1 = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0001");
    var bundleV2 = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0002");

    var verdictV1 = await _verdictBuilder.BuildAsync(bundleV1.ToEvidencePack(), bundleV1.PolicyLock, CancellationToken.None);
    var verdictV2 = await _verdictBuilder.BuildAsync(bundleV2.ToEvidencePack(), bundleV2.PolicyLock, CancellationToken.None);

    // Act
    var delta = await _verdictBuilder.DiffAsync(verdictV1.CgsHash, verdictV2.CgsHash);

    // Assert
    delta.AddedVulns.Should().Contain("CVE-2024-NEW");
    delta.RemovedVulns.Should().Contain("CVE-2024-FIXED");
    delta.StatusChanges.Should().Contain(c =>
        c.Cve == "CVE-2024-CHANGED" &&
        c.FromStatus == VexStatus.Affected &&
        c.ToStatus == VexStatus.NotAffected);
}
```

### E2E-006: Offline Replay Test

```csharp
[Trait("Category", TestCategories.AirGap)]
public class OfflineReplayTests : NetworkIsolatedTestBase
{
    [Fact]
    public async Task OfflineReplay_ProducesIdenticalVerdict()
    {
        // Arrange
        AssertNoNetworkCalls(); // Fail if any network access

        var bundle = await BundleLoader.LoadAsync("fixtures/e2e/bundle-0001");

        // Act - replay with network disabled
        var verdict = await _verdictBuilder.ReplayAsync(
            bundle.Manifest,
            CancellationToken.None);

        // Assert
        verdict.CgsHash.Should().Be(bundle.ExpectedVerdictHash,
            "offline replay should match online verdict");
    }
}
```

### E2E-007: CLI Verify Command

```csharp
[Fact]
public async Task CliVerifyCommand_ValidatesBundle()
{
    // Arrange
    var bundlePath = GetFixturePath("fixtures/e2e/bundle-0001.tar.gz");

    // Act
    var result = await CliRunner.RunAsync("stella", "verify", "--bundle", bundlePath);

    // Assert
    result.ExitCode.Should().Be(0);
    result.Stdout.Should().Contain("Verdict verified: sha256:");
    result.Stdout.Should().Contain("Replay: PASS");
}
```

## Success Criteria

- [ ] Golden bundle produces expected verdict hash
- [ ] Replay from bundle matches original
- [ ] Cross-platform replay produces identical hash
- [ ] Delta between versions correctly computed
- [ ] DSSE signature verifies
- [ ] Offline replay works without network
- [ ] CLI `stella verify --bundle` functional

## Test Runner Configuration

```yaml
# .gitea/workflows/e2e-replay.yml
name: E2E Replay Verification

on:
  schedule:
    - cron: '0 2 * * *'  # Daily at 2 AM
  workflow_dispatch:

jobs:
  replay-test:
    strategy:
      matrix:
        os: [ubuntu-22.04, alpine-3.19, debian-bookworm]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - name: Run E2E Replay Tests
        run: |
          dotnet test src/__Tests/E2E/ \
            --filter "Category=E2E|Category=Determinism" \
            --logger "trx;LogFileName=e2e-${{ matrix.os }}.trx"

      - name: Verify Cross-Platform Hash
        run: |
          # Compare verdict hash from this runner to golden hash
          ACTUAL_HASH=$(cat test-output/verdict-hash.txt)
          EXPECTED_HASH=$(cat fixtures/e2e/bundle-0001/expected-verdict-hash.txt)
          if [ "$ACTUAL_HASH" != "$EXPECTED_HASH" ]; then
            echo "FAIL: Hash mismatch on ${{ matrix.os }}"
            exit 1
          fi
```

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use real Sigstore or test keypair? | PENDING - test keypair for reproducibility |
| DR-002 | How many golden bundles to maintain? | PENDING - start with 2 (single version + delta pair) |
| DR-003 | Bundle format tar.gz vs directory? | PENDING - both (tar.gz for CI, directory for dev) |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | From advisory analysis |

docs/implplan/SPRINT_20251229_005_001_BE_sbom_lineage_api.md (new file)
@@ -0,0 +1,260 @@

# SPRINT_20251229_005_001_BE_sbom_lineage_api

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 005 |
| **MODULEID** | BE (Backend) |
| **Topic** | SBOM Lineage API Completion |
| **Working Directory** | `src/SbomService/` |
| **Status** | TODO |

## Context

This sprint implements the remaining backend API endpoints for the SBOM Lineage Graph feature. The architecture is fully documented in `docs/modules/sbomservice/lineage/architecture.md` with complete interface definitions, database schema, and API contracts. The frontend UI components (~41 files) already exist but require these backend endpoints to function.

**Gap Analysis Summary:**
- Architecture documentation: 100% complete
- Database schema: Defined but needs migration
- Repository interfaces: Defined, need implementation
- API endpoints: 0% implemented
- UI components: ~80% complete (needs API wiring)

## Related Documentation

- `docs/modules/sbomservice/lineage/architecture.md` (Primary reference)
- `docs/modules/sbomservice/architecture.md`
- `docs/modules/vex-lens/architecture.md` (VEX consensus integration)
- `docs/modules/excititor/architecture.md` (VEX delta source)

## Prerequisites

- [ ] Read `docs/modules/sbomservice/lineage/architecture.md` thoroughly
- [ ] Review existing SBOM version repository patterns in `src/SbomService/__Libraries/`
- [ ] Understand Valkey caching patterns in `src/__Libraries/StellaOps.Infrastructure.Valkey/`

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| LIN-001 | Create `sbom_lineage_edges` migration | TODO | | PostgreSQL migration per schema spec |
| LIN-002 | Create `vex_deltas` migration | TODO | | PostgreSQL migration per schema spec |
| LIN-003 | Create `sbom_verdict_links` migration | TODO | | PostgreSQL migration per schema spec |
| LIN-004 | Implement `ISbomLineageEdgeRepository` | TODO | | EF Core repository with tenant isolation |
| LIN-005 | Implement `IVexDeltaRepository` | TODO | | EF Core repository per architecture |
| LIN-006 | Implement `ISbomVerdictLinkRepository` | TODO | | Links SBOM versions to VEX consensus |
| LIN-007 | Implement `ILineageGraphService` | TODO | | Orchestrates queries, caches results |
| LIN-008 | Add `GET /api/v1/lineage/{artifactDigest}` | TODO | | Returns lineage DAG with nodes/edges |
| LIN-009 | Add `GET /api/v1/lineage/diff` | TODO | | SBOM + VEX + reachability diffs |
| LIN-010 | Add `POST /api/v1/lineage/export` | TODO | | Evidence pack generation with signing |
| LIN-011 | Implement Valkey hover card cache | TODO | | Key: `lineage:hover:{tenantId}:{digest}` TTL:5m; see cache sketch below |
| LIN-012 | Implement Valkey compare cache | TODO | | Key: `lineage:compare:{tenantId}:{a}:{b}` TTL:10m |
| LIN-013 | Add determinism tests for node/edge ordering | TODO | | Golden file tests |
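
LIN-011's cache can stay small. A sketch using StackExchange.Redis (Valkey is wire-compatible with Redis), with the key format and TTL taken from the tracker row; `HoverCard` and the loader delegate are hypothetical shapes, not the final contract:

```csharp
// Sketch of the hover-card cache (LIN-011): cache-aside with a 5-minute TTL.
using System.Text.Json;
using StackExchange.Redis;

public sealed class HoverCardCache
{
    private static readonly TimeSpan Ttl = TimeSpan.FromMinutes(5);
    private readonly IDatabase _db;

    public HoverCardCache(IConnectionMultiplexer mux) => _db = mux.GetDatabase();

    public async Task<HoverCard> GetOrLoadAsync(
        Guid tenantId,
        string digest,
        Func<Task<HoverCard>> load)
    {
        var key = $"lineage:hover:{tenantId}:{digest}";   // key format per LIN-011
        var cached = await _db.StringGetAsync(key);
        if (cached.HasValue)
            return JsonSerializer.Deserialize<HoverCard>((string)cached!)!;

        var card = await load();
        await _db.StringSetAsync(key, JsonSerializer.Serialize(card), Ttl);
        return card;
    }
}
```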

## Technical Design

### Repository Implementations

```csharp
// Location: src/SbomService/__Libraries/StellaOps.SbomService.Lineage/Repositories/

public sealed class SbomLineageEdgeRepository : ISbomLineageEdgeRepository
{
    private readonly SbomDbContext _db;
    private readonly ILogger<SbomLineageEdgeRepository> _logger;

    public async ValueTask<LineageGraph> GetGraphAsync(
        string artifactDigest,
        Guid tenantId,
        int maxDepth,
        CancellationToken ct)
    {
        // BFS traversal with depth limit.
        // Deterministic ordering: edges sorted by (from, to, relationship) ordinal.
        var visited = new HashSet<string>();
        var queue = new Queue<(string Digest, int Depth)>();
        queue.Enqueue((artifactDigest, 0));

        var nodes = new List<LineageNode>();
        var edges = new List<LineageEdge>();

        while (queue.Count > 0)
        {
            var (current, depth) = queue.Dequeue();
            if (depth > maxDepth || !visited.Add(current)) continue;

            var node = await GetNodeAsync(current, tenantId, ct);
            if (node != null) nodes.Add(node);

            var children = await GetChildrenAsync(current, tenantId, ct);
            var parents = await GetParentsAsync(current, tenantId, ct);

            edges.AddRange(children);
            edges.AddRange(parents);

            foreach (var edge in children)
                queue.Enqueue((edge.ChildDigest, depth + 1));
            foreach (var edge in parents)
                queue.Enqueue((edge.ParentDigest, depth + 1));
        }

        // Deterministic ordering: nodes newest-first per the success criteria;
        // edges deduplicated first, since LINQ's Distinct does not guarantee
        // it preserves a prior OrderBy.
        return new LineageGraph(
            Nodes: nodes.OrderByDescending(n => n.SequenceNumber).ThenByDescending(n => n.CreatedAt).ToList(),
            Edges: edges
                .Distinct()
                .OrderBy(e => e.ParentDigest, StringComparer.Ordinal)
                .ThenBy(e => e.ChildDigest, StringComparer.Ordinal)
                .ThenBy(e => e.Relationship)
                .ToList()
        );
    }
}
```

### API Controller

```csharp
// Location: src/SbomService/StellaOps.SbomService.WebService/Controllers/LineageController.cs

[ApiController]
[Route("api/v1/lineage")]
[Authorize(Policy = "sbom:read")]
public sealed class LineageController : ControllerBase
{
    private readonly ILineageGraphService _lineageService;
    private readonly ITenantContext _tenantContext;

    [HttpGet("{artifactDigest}")]
    [ProducesResponseType<LineageGraphResponse>(200)]
    [ProducesResponseType(404)]
    public async Task<IActionResult> GetLineage(
        string artifactDigest,
        [FromQuery] int maxDepth = 10,
        [FromQuery] bool includeVerdicts = true,
        CancellationToken ct = default)
    {
        var options = new LineageQueryOptions(maxDepth, includeVerdicts, IncludeBadges: true);
        var result = await _lineageService.GetLineageAsync(
            artifactDigest,
            _tenantContext.TenantId,
            options,
            ct);

        if (result.Nodes.Count == 0)
            return NotFound(new { error = "LINEAGE_NOT_FOUND" });

        return Ok(result);
    }

    [HttpGet("diff")]
    [ProducesResponseType<LineageDiffResponse>(200)]
    [ProducesResponseType(400)]
    public async Task<IActionResult> GetDiff(
        [FromQuery] string from,
        [FromQuery] string to,
        CancellationToken ct = default)
    {
        if (from == to)
            return BadRequest(new { error = "LINEAGE_DIFF_INVALID" });

        var result = await _lineageService.GetDiffAsync(
            from, to, _tenantContext.TenantId, ct);

        return Ok(result);
    }

    [HttpPost("export")]
    [Authorize(Policy = "lineage:export")]
    [ProducesResponseType<ExportResponse>(200)]
    [ProducesResponseType(413)]
    public async Task<IActionResult> Export(
        [FromBody] ExportRequest request,
        CancellationToken ct = default)
    {
        // Size limit check
        // Generate signed evidence pack
        // Return download URL with expiry
    }
}
```

### Database Migrations

```sql
-- Migration: 20251229_001_CreateLineageTables.sql

CREATE TABLE sbom_lineage_edges (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    parent_digest TEXT NOT NULL,
    child_digest TEXT NOT NULL,
    relationship TEXT NOT NULL CHECK (relationship IN ('parent', 'build', 'base')),
    tenant_id UUID NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE (parent_digest, child_digest, tenant_id)
);

CREATE INDEX idx_lineage_edges_parent ON sbom_lineage_edges(parent_digest, tenant_id);
CREATE INDEX idx_lineage_edges_child ON sbom_lineage_edges(child_digest, tenant_id);
CREATE INDEX idx_lineage_edges_created ON sbom_lineage_edges(tenant_id, created_at DESC);

CREATE TABLE vex_deltas (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    from_artifact_digest TEXT NOT NULL,
    to_artifact_digest TEXT NOT NULL,
    cve TEXT NOT NULL,
    from_status TEXT NOT NULL,
    to_status TEXT NOT NULL,
    rationale JSONB NOT NULL DEFAULT '{}',
    replay_hash TEXT NOT NULL,
    attestation_digest TEXT,
    tenant_id UUID NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    UNIQUE (from_artifact_digest, to_artifact_digest, cve, tenant_id)
);

CREATE INDEX idx_vex_deltas_to ON vex_deltas(to_artifact_digest, tenant_id);
CREATE INDEX idx_vex_deltas_cve ON vex_deltas(cve, tenant_id);
CREATE INDEX idx_vex_deltas_created ON vex_deltas(tenant_id, created_at DESC);

CREATE TABLE sbom_verdict_links (
    sbom_version_id UUID NOT NULL,
    cve TEXT NOT NULL,
    consensus_projection_id UUID NOT NULL,
    verdict_status TEXT NOT NULL,
    confidence_score DECIMAL(5,4) NOT NULL,
    tenant_id UUID NOT NULL,
    linked_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    PRIMARY KEY (sbom_version_id, cve, tenant_id)
);

CREATE INDEX idx_verdict_links_cve ON sbom_verdict_links(cve, tenant_id);
CREATE INDEX idx_verdict_links_projection ON sbom_verdict_links(consensus_projection_id);
```

## Success Criteria

- [ ] All 3 database tables created with proper indexes
- [ ] `GET /api/v1/lineage/{digest}` returns DAG in <200ms (cached)
- [ ] `GET /api/v1/lineage/diff` returns deterministic diff structure
- [ ] Hover card cache achieves <150ms response time
- [ ] Node ordering is stable (sequenceNumber DESC, createdAt DESC)
- [ ] Edge ordering is deterministic (lexicographic on from/to/relationship)
- [ ] Golden file tests confirm identical JSON output across runs

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use existing Valkey infrastructure vs dedicated cache | DECIDED: Use existing |
| DR-002 | Evidence pack size limit (currently 50MB proposed) | PENDING |
| DR-003 | Include reachability diff in export? | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Gap analysis confirmed API endpoints missing |

docs/implplan/SPRINT_20251229_005_002_CONCEL_astra_connector.md (new file)
@@ -0,0 +1,266 @@
|
||||
# SPRINT_20251229_005_002_CONCEL_astra_connector
|
||||
|
||||
## Sprint Overview
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **IMPLID** | 20251229 |
|
||||
| **BATCHID** | 005 |
|
||||
| **MODULEID** | CONCEL (Concelier) |
|
||||
| **Topic** | Astra Linux Advisory Connector |
|
||||
| **Working Directory** | `src/Concelier/` |
|
||||
| **Status** | TODO |
|
||||
|
||||
## Context
|
||||
|
||||
This sprint implements the Astra Linux advisory connector - the **only major gap** identified in the cross-distro vulnerability intelligence analysis. All other distro connectors (RedHat, SUSE, Ubuntu, Debian, Alpine) are already implemented.
|
||||
|
||||
**Gap Analysis Summary:**
|
||||
- RedHat CSAF connector: ✅ 100% complete
|
||||
- SUSE CSAF connector: ✅ 100% complete
|
||||
- Ubuntu USN connector: ✅ 100% complete
|
||||
- Debian DSA connector: ✅ 100% complete
|
||||
- Alpine SecDB connector: ✅ 100% complete
|
||||
- **Astra Linux connector: ❌ 0% (this sprint)**
|
||||
|
||||
**Astra Linux Context:**
|
||||
- Russian domestic Linux distribution based on Debian
|
||||
- FSTEC certified (Russian security certification)
|
||||
- Advisory source: `https://astra.group/security/` or equivalent CSAF endpoint
|
||||
- Version comparator: Uses dpkg EVR (inherits from Debian)
|
||||
- Target markets: Russian government, defense, critical infrastructure
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- `docs/modules/concelier/architecture.md`
|
||||
- `src/Concelier/__Connectors/StellaOps.Concelier.Connector.Debian/` (base pattern)
|
||||
- `src/Concelier/__Connectors/StellaOps.Concelier.Connector.RedHat/` (CSAF pattern)
|
||||
- Existing version comparator: `src/__Libraries/StellaOps.VersionComparison/Comparers/DebianVersionComparer.cs`
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- [ ] Identify Astra Linux official advisory feed URL/format
|
||||
- [ ] Confirm whether Astra uses CSAF 2.0 or custom format
|
||||
- [ ] Review Debian connector implementation patterns
|
||||
- [ ] Understand AOC (Aggregation-Only Contract) constraints
|
||||
|
||||
## Delivery Tracker
|
||||
|
||||
| ID | Task | Status | Assignee | Notes |
|
||||
|----|------|--------|----------|-------|
|
||||
| ASTRA-001 | Research Astra Linux advisory feed format | TODO | | CSAF vs custom HTML/JSON |
|
||||
| ASTRA-002 | Create `StellaOps.Concelier.Connector.Astra` project | TODO | | Follow existing connector patterns |
|
||||
| ASTRA-003 | Implement `IAstraAdvisorySource` interface | TODO | | Fetch from official endpoint |
|
||||
| ASTRA-004 | Implement advisory parser | TODO | | CSAF or custom format parsing |
|
||||
| ASTRA-005 | Implement `AstraVersionMatcher` | TODO | | Likely dpkg EVR, verify |
|
||||
| ASTRA-006 | Add package name normalization | TODO | | Astra-specific naming conventions |
|
||||
| ASTRA-007 | Create `astra.yaml` connector config | TODO | | Air-gap compatible |
|
||||
| ASTRA-008 | Implement `IAstraObservationMapper` | TODO | | Map to AdvisoryObservation |
|
||||
| ASTRA-009 | Add trust vector configuration | TODO | | Provenance/Coverage/Replayability |
|
||||
| ASTRA-010 | Add integration tests | TODO | | Mock feed tests |
|
||||
| ASTRA-011 | Add sample advisory corpus | TODO | | Golden file validation |
|
||||
| ASTRA-012 | Document connector in module dossier | TODO | | Update architecture.md |
|
||||
|
||||
## Technical Design
|
||||
|
||||
### Project Structure
|
||||
|
||||
```
src/Concelier/__Connectors/StellaOps.Concelier.Connector.Astra/
├── AstraAdvisorySource.cs        # IAdvisorySource implementation
├── AstraAdvisoryParser.cs        # CSAF/custom format parser
├── AstraVersionMatcher.cs        # dpkg EVR with Astra specifics
├── AstraPackageNormalizer.cs     # Astra package naming
├── AstraObservationMapper.cs     # AdvisoryObservation mapping
├── AstraTrustConfig.cs           # Trust vector defaults
├── Models/
│   ├── AstraAdvisory.cs          # Parsed advisory record
│   └── AstraPackage.cs           # Package reference
└── Configuration/
    └── AstraConnectorOptions.cs  # Connection settings
```

### Interface Implementation

```csharp
// Location: src/Concelier/__Connectors/StellaOps.Concelier.Connector.Astra/AstraAdvisorySource.cs

public sealed class AstraAdvisorySource : IAdvisorySource
{
    public string SourceId => "astra";
    public string DisplayName => "Astra Linux Security";
    public DistroFamily DistroFamily => DistroFamily.Debian; // Based on Debian

    private readonly IAstraClient _client;
    private readonly AstraAdvisoryParser _parser;
    private readonly AstraPackageNormalizer _normalizer;   // used by MapToObservations below
    private readonly AstraTrustConfig _trustConfig;        // used by MapToObservations below
    private readonly ILogger<AstraAdvisorySource> _logger;

    public async IAsyncEnumerable<AdvisoryObservation> FetchAsync(
        FetchOptions options,
        [EnumeratorCancellation] CancellationToken ct)
    {
        // Fetch from the Astra advisory endpoint
        var advisories = await _client.GetAdvisoriesAsync(options.Since, ct);

        foreach (var advisory in advisories)
        {
            ct.ThrowIfCancellationRequested();

            var parsed = _parser.Parse(advisory);
            foreach (var observation in MapToObservations(parsed))
            {
                yield return observation;
            }
        }
    }

    public async ValueTask<AdvisoryObservation?> GetByIdAsync(
        string advisoryId,
        CancellationToken ct)
    {
        var advisory = await _client.GetAdvisoryAsync(advisoryId, ct);
        if (advisory == null) return null;

        var parsed = _parser.Parse(advisory);
        return MapToObservations(parsed).FirstOrDefault();
    }

    private IEnumerable<AdvisoryObservation> MapToObservations(AstraAdvisory advisory)
    {
        foreach (var cve in advisory.Cves)
        {
            foreach (var pkg in advisory.AffectedPackages)
            {
                yield return new AdvisoryObservation
                {
                    SourceId = SourceId,
                    AdvisoryId = advisory.Id,
                    Cve = cve,
                    PackageName = _normalizer.Normalize(pkg.Name),
                    AffectedVersions = pkg.AffectedVersions,
                    FixedVersion = pkg.FixedVersion,
                    Severity = advisory.Severity,
                    TrustVector = _trustConfig.DefaultVector,
                    ObservedAt = DateTimeOffset.UtcNow,
                    RawPayload = advisory.RawJson
                };
            }
        }
    }
}
```

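The sprint does not specify how the source is wired into the host; a minimal DI sketch, assuming connectors register as `IAdvisorySource` implementations in the standard Microsoft.Extensions.DependencyInjection container. The `AstraClient` type and extension-method choices here are assumptions, not the documented Concelier wiring:

```csharp
// Hypothetical registration sketch; lifetimes and the AstraClient type are illustrative.
services.AddHttpClient<IAstraClient, AstraClient>();
services.AddSingleton<AstraAdvisoryParser>();
services.AddSingleton<AstraPackageNormalizer>();
services.AddSingleton<AstraTrustConfig>();
services.AddSingleton<IAdvisorySource, AstraAdvisorySource>();
```
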
### Version Matcher (Debian EVR Inheritance)

```csharp
// Location: src/Concelier/__Connectors/StellaOps.Concelier.Connector.Astra/AstraVersionMatcher.cs

public sealed class AstraVersionMatcher : IVersionMatcher
{
    private readonly DebianVersionComparer _debianComparer;

    public AstraVersionMatcher()
    {
        // Astra uses dpkg EVR format (epoch:version-release)
        _debianComparer = new DebianVersionComparer();
    }

    public bool IsAffected(string installedVersion, VersionConstraint constraint)
    {
        // Delegate to Debian EVR comparison
        return constraint.Type switch
        {
            ConstraintType.LessThan =>
                _debianComparer.Compare(installedVersion, constraint.Version) < 0,
            ConstraintType.LessThanOrEqual =>
                _debianComparer.Compare(installedVersion, constraint.Version) <= 0,
            ConstraintType.Equal =>
                _debianComparer.Compare(installedVersion, constraint.Version) == 0,
            ConstraintType.Range =>
                IsInRange(installedVersion, constraint), // checks both range ends; omitted for brevity
            _ => false
        };
    }

    public bool IsFixed(string installedVersion, string? fixedVersion)
    {
        if (fixedVersion == null) return false;
        return _debianComparer.Compare(installedVersion, fixedVersion) >= 0;
    }
}
```

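A short usage sketch of the matcher. Only the `IsAffected`/`IsFixed` semantics above are given by the design; the `VersionConstraint` constructor shape is an assumption:

```csharp
// Hypothetical call site; constructor arguments are illustrative.
var matcher = new AstraVersionMatcher();

var constraint = new VersionConstraint(ConstraintType.LessThan, "2:1.4.3-1astra2");
bool affected = matcher.IsAffected("2:1.4.2-1astra3", constraint);        // true: installed < fixed-in version
bool resolved = matcher.IsFixed("2:1.4.3-1astra2", "2:1.4.3-1astra2");    // true: installed >= fix
```
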
### Trust Configuration

```csharp
// Location: src/Concelier/__Connectors/StellaOps.Concelier.Connector.Astra/AstraTrustConfig.cs

public sealed class AstraTrustConfig
{
    // Tier 1 - Official distro advisory source
    public TrustVector DefaultVector => new(
        Provenance: 0.95m,    // Official FSTEC-certified source
        Coverage: 0.90m,      // Comprehensive for Astra packages
        Replayability: 0.85m  // Deterministic advisory format
    );

    public static readonly TrustVector MinimumAcceptable = new(
        Provenance: 0.70m,
        Coverage: 0.60m,
        Replayability: 0.50m
    );
}
```

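`MinimumAcceptable` implies a floor check somewhere in ingestion, but the design does not spell it out. A minimal sketch, assuming a componentwise comparison over the three record fields; where this gate lives in the pipeline is an assumption:

```csharp
// Hypothetical floor check against AstraTrustConfig.MinimumAcceptable.
static bool MeetsTrustFloor(TrustVector vector, TrustVector floor) =>
    vector.Provenance    >= floor.Provenance &&
    vector.Coverage      >= floor.Coverage &&
    vector.Replayability >= floor.Replayability;

// e.g. skip observations from a degraded mirror:
// if (!MeetsTrustFloor(observation.TrustVector, AstraTrustConfig.MinimumAcceptable)) continue;
```
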
### Connector Configuration

```yaml
# etc/connectors/astra.yaml
connector:
  id: astra
  display_name: Astra Linux Security
  enabled: true

source:
  base_url: https://astra.group/security/csaf/  # Or actual endpoint
  format: csaf  # or custom
  auth:
    type: none  # or api_key if required
  rate_limit:
    requests_per_minute: 60

trust:
  provenance: 0.95
  coverage: 0.90
  replayability: 0.85

offline:
  bundle_path: /var/lib/stellaops/feeds/astra/
  update_frequency: daily
```

## Success Criteria

- [ ] Connector fetches advisories from the Astra Linux source
- [ ] dpkg EVR version comparison works correctly
- [ ] Advisories map to AdvisoryObservation with proper trust vectors
- [ ] Air-gap mode works with bundled advisory feeds
- [ ] Integration tests pass with mock feed data
- [ ] Documentation updated in `docs/modules/concelier/architecture.md`

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Astra advisory feed format (CSAF vs custom) | PENDING - Requires research |
| DR-002 | Authentication requirements for Astra feed | PENDING |
| DR-003 | Astra package naming conventions | PENDING - Verify against Debian |
| DR-004 | Feed availability in air-gapped environments | PENDING - Offline bundle strategy |
| DR-005 | FSTEC compliance documentation requirements | PENDING |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Only missing distro connector identified |

docs/implplan/SPRINT_20251229_005_003_FE_lineage_ui_wiring.md (new file, 344 lines)
@@ -0,0 +1,344 @@

# SPRINT_20251229_005_003_FE_lineage_ui_wiring

## Sprint Overview

| Field | Value |
|-------|-------|
| **IMPLID** | 20251229 |
| **BATCHID** | 005 |
| **MODULEID** | FE (Frontend) |
| **Topic** | Lineage UI API Wiring |
| **Working Directory** | `src/Web/StellaOps.Web/` |
| **Status** | TODO |
| **Depends On** | SPRINT_20251229_005_001_BE_sbom_lineage_api |

## Context

This sprint wires the existing SBOM Lineage Graph UI components (~41 files) to the backend API endpoints created in Sprint 005_001. The UI components are substantially complete but currently use mock data or incomplete service stubs.

**Gap Analysis Summary:**
- UI Components: ~80% complete (41 files in `src/app/features/lineage/`)
- Services: stubs exist, need real API calls
- State management: partially implemented
- Hover card interactions: UI complete, needs data binding

**Key UI Files Already Implemented:**
- `lineage-graph.component.ts` - Main DAG visualization (1000+ LOC)
- `lineage-hover-card.component.ts` - Hover interactions
- `lineage-sbom-diff.component.ts` - SBOM delta display
- `lineage-vex-diff.component.ts` - VEX status changes
- `lineage-compare-panel.component.ts` - Side-by-side comparison

## Related Documentation

- `docs/modules/sbomservice/lineage/architecture.md` (API contracts)
- `docs/modules/web/architecture.md`
- SPRINT_20251229_005_001_BE_sbom_lineage_api (backend prerequisite)

## Prerequisites

- [ ] SPRINT_20251229_005_001_BE_sbom_lineage_api completed
- [ ] Backend API endpoints deployed to the dev environment
- [ ] Review existing lineage components in `src/app/features/lineage/`

## Delivery Tracker

| ID | Task | Status | Assignee | Notes |
|----|------|--------|----------|-------|
| UI-001 | Update `LineageService` with real API calls | TODO | | Replace mock data |
| UI-002 | Wire `GET /lineage/{digest}` to graph component | TODO | | Load DAG data |
| UI-003 | Wire `GET /lineage/diff` to compare panel | TODO | | SBOM + VEX diffs |
| UI-004 | Implement hover card data loading | TODO | | Observable streams |
| UI-005 | Add error states and loading indicators | TODO | | UX polish |
| UI-006 | Implement export button with `POST /lineage/export` | TODO | | Download flow |
| UI-007 | Add caching layer in service | TODO | | Match backend TTLs |
| UI-008 | Update OpenAPI client generation | TODO | | Regenerate from spec |
| UI-009 | Add E2E tests for lineage flow | TODO | | Cypress/Playwright |

## Technical Design

### Service Implementation

```typescript
// Location: src/Web/StellaOps.Web/src/app/features/lineage/services/lineage.service.ts

import { Injectable, inject } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable, shareReplay } from 'rxjs';
import { environment } from '@environments/environment';
// ComponentDiff, VersionChange, VexChange and ReachabilityChange are defined
// in the feature's model file (see the data model appendix in the UI guide).

export interface LineageNode {
  id: string;
  digest: string;
  artifactRef: string;
  sequenceNumber: number;
  createdAt: string;
  source: string;
  badges: {
    newVulns: number;
    resolvedVulns: number;
    signatureStatus: 'valid' | 'invalid' | 'unknown';
  };
  replayHash: string;
}

export interface LineageEdge {
  from: string;
  to: string;
  relationship: 'parent' | 'build' | 'base';
}

export interface LineageGraphResponse {
  artifact: string;
  nodes: LineageNode[];
  edges: LineageEdge[];
}

export interface LineageDiffResponse {
  sbomDiff: {
    added: ComponentDiff[];
    removed: ComponentDiff[];
    versionChanged: VersionChange[];
  };
  vexDiff: VexChange[];
  reachabilityDiff: ReachabilityChange[];
  replayHash: string;
}

@Injectable({ providedIn: 'root' })
export class LineageService {
  private readonly http = inject(HttpClient);
  private readonly baseUrl = `${environment.apiUrl}/api/v1/lineage`;

  // Cache for hover cards (matches backend 5-minute TTL)
  private readonly graphCache = new Map<string, Observable<LineageGraphResponse>>();

  getLineage(artifactDigest: string, options?: {
    maxDepth?: number;
    includeVerdicts?: boolean;
  }): Observable<LineageGraphResponse> {
    const cacheKey = `${artifactDigest}:${options?.maxDepth ?? 10}`;

    if (!this.graphCache.has(cacheKey)) {
      const params = new URLSearchParams();
      if (options?.maxDepth) params.set('maxDepth', options.maxDepth.toString());
      if (options?.includeVerdicts !== undefined) {
        params.set('includeVerdicts', options.includeVerdicts.toString());
      }

      const url = `${this.baseUrl}/${encodeURIComponent(artifactDigest)}?${params}`;
      this.graphCache.set(cacheKey, this.http.get<LineageGraphResponse>(url).pipe(
        shareReplay({ bufferSize: 1, refCount: true, windowTime: 5 * 60 * 1000 })
      ));
    }

    return this.graphCache.get(cacheKey)!;
  }

  getDiff(fromDigest: string, toDigest: string): Observable<LineageDiffResponse> {
    const params = new URLSearchParams({ from: fromDigest, to: toDigest });
    return this.http.get<LineageDiffResponse>(`${this.baseUrl}/diff?${params}`);
  }

  export(artifactDigests: string[], options?: {
    includeAttestations?: boolean;
    sign?: boolean;
  }): Observable<{ downloadUrl: string; bundleDigest: string; expiresAt: string }> {
    return this.http.post<{
      downloadUrl: string;
      bundleDigest: string;
      expiresAt: string;
    }>(`${this.baseUrl}/export`, {
      artifactDigests,
      includeAttestations: options?.includeAttestations ?? true,
      sign: options?.sign ?? true
    });
  }

  clearCache(): void {
    this.graphCache.clear();
  }
}
```

### Component Wiring

```typescript
// Location: src/Web/StellaOps.Web/src/app/features/lineage/components/lineage-graph.component.ts
// Updates to existing component

import { Component, inject, Input, OnInit, signal, computed } from '@angular/core';
import { LineageService, LineageGraphResponse, LineageNode } from '../services/lineage.service';
import { catchError, of, tap } from 'rxjs';

@Component({
  selector: 'app-lineage-graph',
  // ... existing template
})
export class LineageGraphComponent implements OnInit {
  private readonly lineageService = inject(LineageService);

  @Input({ required: true }) artifactDigest!: string;
  @Input() maxDepth = 10;

  // Reactive state
  readonly loading = signal(true);
  readonly error = signal<string | null>(null);
  readonly graphData = signal<LineageGraphResponse | null>(null);

  // Computed values for template
  readonly nodes = computed(() => this.graphData()?.nodes ?? []);
  readonly edges = computed(() => this.graphData()?.edges ?? []);
  readonly hasData = computed(() => this.nodes().length > 0);

  // Hover state
  readonly hoveredNode = signal<LineageNode | null>(null);

  ngOnInit(): void {
    this.loadGraph();
  }

  private loadGraph(): void {
    this.loading.set(true);
    this.error.set(null);

    this.lineageService.getLineage(this.artifactDigest, {
      maxDepth: this.maxDepth,
      includeVerdicts: true
    }).pipe(
      tap(data => {
        this.graphData.set(data);
        this.loading.set(false);
      }),
      catchError(err => {
        this.error.set(err.status === 404
          ? 'Artifact not found in lineage graph'
          : 'Failed to load lineage data');
        this.loading.set(false);
        return of(null);
      })
    ).subscribe();
  }

  onNodeHover(node: LineageNode | null): void {
    this.hoveredNode.set(node);
  }

  onNodeClick(node: LineageNode): void {
    // Navigate to compare view or artifact detail
  }
}
```

### Hover Card Integration

```typescript
// Location: src/Web/StellaOps.Web/src/app/features/lineage/components/lineage-hover-card.component.ts
// Updates to existing component

import { Component, Input } from '@angular/core';
import { LineageNode } from '../services/lineage.service';

@Component({
  selector: 'app-lineage-hover-card',
  template: `
    @if (node) {
      <div class="hover-card" [style.left.px]="position.x" [style.top.px]="position.y">
        <header class="hover-card__header">
          <span class="artifact-ref">{{ node.artifactRef }}</span>
          <span class="sequence">#{{ node.sequenceNumber }}</span>
        </header>

        <section class="hover-card__badges">
          @if (node.badges.newVulns > 0) {
            <span class="badge badge--danger">
              {{ node.badges.newVulns }} new vulns
            </span>
          }
          @if (node.badges.resolvedVulns > 0) {
            <span class="badge badge--success">
              {{ node.badges.resolvedVulns }} resolved
            </span>
          }
          <span class="badge" [class.badge--success]="node.badges.signatureStatus === 'valid'"
                [class.badge--danger]="node.badges.signatureStatus === 'invalid'">
            {{ signatureLabel() }}
          </span>
        </section>

        <section class="hover-card__meta">
          <div class="meta-row">
            <span class="label">Created:</span>
            <span class="value">{{ node.createdAt | date:'short' }}</span>
          </div>
          <div class="meta-row">
            <span class="label">Source:</span>
            <span class="value">{{ node.source }}</span>
          </div>
          <div class="meta-row">
            <span class="label">Replay Hash:</span>
            <code class="value hash">{{ truncatedHash() }}</code>
          </div>
        </section>

        <footer class="hover-card__actions">
          <button class="btn btn--sm" (click)="onCompare()">Compare</button>
          <button class="btn btn--sm btn--outline" (click)="onViewDetails()">Details</button>
        </footer>
      </div>
    }
  `
})
export class LineageHoverCardComponent {
  @Input() node: LineageNode | null = null;
  @Input() position = { x: 0, y: 0 };

  // Plain methods rather than computed(): `node` is a decorator @Input, not a
  // signal, so computed() would never recalculate when it changes.
  signatureLabel(): string {
    switch (this.node?.badges.signatureStatus) {
      case 'valid': return '✓ Signed';
      case 'invalid': return '✗ Invalid';
      default: return '? Unknown';
    }
  }

  truncatedHash(): string {
    const hash = this.node?.replayHash ?? '';
    return hash.length > 16 ? `${hash.substring(0, 8)}...${hash.substring(hash.length - 8)}` : hash;
  }

  onCompare(): void {
    // Emit event to parent for compare mode
  }

  onViewDetails(): void {
    // Navigate to artifact detail page
  }
}
```

## Success Criteria

- [ ] Graph loads real data from backend API
- [ ] Hover cards display live vulnerability badges
- [ ] Compare panel shows accurate SBOM/VEX diffs
- [ ] Export button triggers download with signed bundle
- [ ] Loading states display during API calls
- [ ] Error states show meaningful messages
- [ ] Cache prevents redundant API calls
- [ ] E2E tests pass for complete lineage flow

## Decisions & Risks

| ID | Decision/Risk | Status |
|----|---------------|--------|
| DR-001 | Use Angular signals vs RxJS for component state | DECIDED: Signals |
| DR-002 | Client-side caching strategy alignment with backend TTLs | DECIDED: Match 5m/10m |
| DR-003 | Graph rendering library (existing D3 vs alternatives) | DECIDED: Keep existing |

## Execution Log

| Date | Action | Notes |
|------|--------|-------|
| 2025-12-29 | Sprint created | Depends on BE API completion |

docs/modules/ui/LINEAGE_SMARTDIFF_UI_GUIDE.md (new file, 950 lines)
@@ -0,0 +1,950 @@

# Smart-Diff & SBOM Lineage Graph - UI Implementation Guide

## Overview

This document provides comprehensive guidance for implementing the Smart-Diff and SBOM Lineage Graph UI features in the StellaOps Angular frontend.

**Last Updated:** 2025-12-29
**Related Sprints:** FE_003 through FE_009

---

## Table of Contents

1. [Architecture Overview](#architecture-overview)
2. [Existing Component Inventory](#existing-component-inventory)
3. [Angular 17 Patterns](#angular-17-patterns)
4. [State Management](#state-management)
5. [Visualization Techniques](#visualization-techniques)
6. [Styling System](#styling-system)
7. [Testing Strategy](#testing-strategy)
8. [Accessibility Requirements](#accessibility-requirements)
9. [Sprint Task Reference](#sprint-task-reference)

---

## Architecture Overview

### File Structure

```
src/app/
├── core/                          # Global services, guards, interceptors
│   ├── services/
│   │   ├── delta-verdict.service.ts
│   │   ├── audit-pack.service.ts
│   │   └── pinned-explanation.service.ts
│   └── api/                       # API client base classes
├── features/
│   ├── lineage/                   # Main lineage feature
│   │   ├── components/
│   │   │   ├── lineage-graph/     # SVG-based DAG visualization
│   │   │   ├── lineage-node/      # Individual node rendering
│   │   │   ├── lineage-edge/      # Bezier curve edges
│   │   │   ├── lineage-hover-card/# Hover details
│   │   │   ├── lineage-minimap/   # Canvas minimap
│   │   │   ├── explainer-timeline/# Engine step visualization
│   │   │   ├── diff-table/        # Expandable diff table
│   │   │   ├── reachability-diff/ # Gate visualization
│   │   │   ├── pinned-explanation/# Copy-safe snippets
│   │   │   └── audit-pack-export/ # Export dialog
│   │   ├── services/
│   │   │   ├── lineage-graph.service.ts
│   │   │   └── lineage-export.service.ts
│   │   ├── models/
│   │   │   └── lineage.models.ts
│   │   └── lineage.routes.ts
│   ├── compare/                   # Comparison feature
│   │   ├── components/
│   │   │   ├── compare-view/      # Main comparison container
│   │   │   ├── three-pane-layout/ # Categories/Items/Proof layout
│   │   │   └── delta-summary-strip/
│   │   └── services/
│   │       └── compare.service.ts
│   └── graph/                     # Generic graph components
├── shared/                        # Reusable UI components
│   └── components/
│       ├── data-table/
│       ├── badge/
│       ├── tooltip/
│       └── modal/
└── styles/                        # Global SCSS
    ├── variables.scss
    ├── mixins.scss
    └── themes/
```

### Module Boundaries

| Module | Responsibility | Cross-Boundary Dependencies |
|--------|----------------|----------------------------|
| `lineage` | SBOM lineage visualization | Uses `shared` components, `compare` patterns |
| `compare` | Delta comparison | Uses `lineage` data models |
| `graph` | Generic graph rendering | Used by `lineage` |
| `shared` | Reusable UI primitives | No feature dependencies |

---

## Existing Component Inventory

### Lineage Feature (41 files)

| Component | Status | Notes |
|-----------|--------|-------|
| `LineageGraphComponent` | ✅ Complete | SVG-based DAG with pan/zoom |
| `LineageNodeComponent` | ✅ Complete | Node shapes, badges, selection |
| `LineageEdgeComponent` | ✅ Complete | Bezier curves, edge types |
| `LineageHoverCardComponent` | ✅ Complete | Node details on hover |
| `LineageMiniMapComponent` | ✅ Complete | Canvas-based minimap |
| `LineageControlsComponent` | ✅ Complete | Zoom, pan, reset buttons |
| `LineageSbomDiffComponent` | ⚠️ Partial | Needs row expanders |
| `LineageVexDiffComponent` | ⚠️ Partial | Needs gate display |
| `LineageCompareComponent` | ⚠️ Partial | Needs explainer integration |
| `LineageExportDialogComponent` | ⚠️ Partial | Needs audit pack format |
| `ReplayHashDisplayComponent` | ✅ Complete | Hash display with copy |
| `WhySafePanelComponent` | ✅ Complete | VEX justification display |
| `ProofTreeComponent` | ⚠️ Partial | Needs confidence breakdown |

### Compare Feature (18 files)

| Component | Status | Notes |
|-----------|--------|-------|
| `CompareViewComponent` | ✅ Complete | Signals-based state |
| `ThreePaneLayoutComponent` | ✅ Complete | Responsive layout |
| `CategoriesPaneComponent` | ✅ Complete | Delta categories |
| `ItemsPaneComponent` | ⚠️ Partial | Needs expansion |
| `ProofPaneComponent` | ✅ Complete | Evidence display |
| `DeltaSummaryStripComponent` | ✅ Complete | Stats header |
| `TrustIndicatorsComponent` | ✅ Complete | Signature status |
| `EnvelopeHashesComponent` | ✅ Complete | Attestation hashes |

---

## Angular 17 Patterns

### Standalone Components

All new components must use standalone architecture:

```typescript
@Component({
  selector: 'app-explainer-step',
  standalone: true,
  imports: [CommonModule, SharedModule],
  templateUrl: './explainer-step.component.html',
  changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExplainerStepComponent {
  // Use signals for state
  readonly expanded = signal(false);

  // Use computed for derived state (fullText/truncatedText are
  // signals defined elsewhere in the component, elided here)
  readonly displayText = computed(() =>
    this.expanded() ? this.fullText() : this.truncatedText()
  );

  // Use inject() for dependencies
  private readonly service = inject(ExplainerService);
}
```

### Input/Output with Signals

Angular 17 signal-based inputs:

```typescript
// Modern approach (preferred)
export class MyComponent {
  // Signal input
  readonly data = input<DataType[]>([]);

  // Required input
  readonly id = input.required<string>();

  // Aliased input
  readonly items = input<Item[]>([], { alias: 'dataItems' });

  // Output
  readonly selectionChange = output<Item>();
}

// Template usage
<app-my-component
  [data]="graphData()"
  [id]="nodeId"
  (selectionChange)="onSelection($event)"
/>
```

### Template Control Flow

Use the new Angular 17 control flow syntax:

```typescript
// In template
@if (loading()) {
  <app-skeleton />
} @else if (error()) {
  <app-error-state [message]="error()" />
} @else {
  @for (item of items(); track item.id) {
    <app-list-item [item]="item" />
  } @empty {
    <app-empty-state />
  }
}

@switch (status()) {
  @case ('success') { <app-success-badge /> }
  @case ('error') { <app-error-badge /> }
  @default { <app-pending-badge /> }
}
```

---

## State Management

### Service-Level State with Signals

```typescript
@Injectable({ providedIn: 'root' })
export class LineageGraphService {
  // Private writable signals
  private readonly _currentGraph = signal<LineageGraph | null>(null);
  private readonly _selectedNodes = signal<Set<string>>(new Set());
  private readonly _hoverState = signal<HoverState | null>(null);

  // Public readonly computed signals
  readonly currentGraph = this._currentGraph.asReadonly();
  readonly selectedNodes = this._selectedNodes.asReadonly();
  readonly hoverState = this._hoverState.asReadonly();

  // Computed derived state (computeLayout is the existing lane/sequence
  // layout routine, elided here)
  readonly layoutNodes = computed(() => {
    const graph = this._currentGraph();
    if (!graph) return [];
    return this.computeLayout(graph.nodes, graph.edges);
  });

  readonly hasSelection = computed(() =>
    this._selectedNodes().size > 0
  );

  // Actions
  selectNode(nodeId: string, multi = false): void {
    this._selectedNodes.update(set => {
      const newSet = multi ? new Set(set) : new Set<string>();
      if (set.has(nodeId) && multi) {
        newSet.delete(nodeId);
      } else {
        newSet.add(nodeId);
      }
      return newSet;
    });
  }

  clearSelection(): void {
    this._selectedNodes.set(new Set());
  }
}
```

### HTTP Data Loading Pattern

```typescript
// Continues the LineageGraphService above (same _currentGraph signal).
interface CacheEntry<T> {
  data: T;
  expiresAt: number;
}

@Injectable({ providedIn: 'root' })
export class LineageGraphService {
  private readonly http = inject(HttpClient);

  // Caching
  private readonly cache = new Map<string, CacheEntry<LineageGraph>>();
  private readonly cacheTtlMs = 5 * 60 * 1000; // 5 minutes

  getLineage(artifactDigest: string, tenantId: string): Observable<LineageGraph> {
    const cacheKey = `${tenantId}:${artifactDigest}`;
    const cached = this.cache.get(cacheKey);

    if (cached && cached.expiresAt > Date.now()) {
      return of(cached.data);
    }

    return this.http.get<LineageGraph>(
      `/api/v1/lineage/${encodeURIComponent(artifactDigest)}`,
      { params: { tenantId } }
    ).pipe(
      tap(graph => {
        this.cache.set(cacheKey, {
          data: graph,
          expiresAt: Date.now() + this.cacheTtlMs
        });
        this._currentGraph.set(graph);
      }),
      shareReplay(1)
    );
  }
}
```

---

## Visualization Techniques

### SVG Graph Rendering

The lineage graph uses SVG for node/edge rendering, with transform groups for pan/zoom:

```typescript
@Component({
  selector: 'app-lineage-graph',
  template: `
    <svg
      #svgElement
      class="lineage-svg"
      [attr.viewBox]="viewBox()"
      (wheel)="onWheel($event)"
      (mousedown)="onMouseDown($event)"
      (mousemove)="onMouseMove($event)"
      (mouseup)="onMouseUp($event)">

      <!-- Background grid -->
      <defs>
        <pattern id="grid" width="20" height="20" patternUnits="userSpaceOnUse">
          <path d="M 20 0 L 0 0 0 20" fill="none" stroke="#e0e0e0" stroke-width="0.5"/>
        </pattern>
      </defs>
      <rect width="100%" height="100%" fill="url(#grid)"/>

      <!-- Transform group for pan/zoom -->
      <g [attr.transform]="transformAttr()">
        <!-- Lane backgrounds -->
        @for (lane of lanes(); track lane.index) {
          <rect
            [attr.x]="lane.x"
            [attr.y]="0"
            [attr.width]="lane.width"
            [attr.height]="graphHeight()"
            [attr.fill]="lane.index % 2 === 0 ? '#f8f9fa' : '#ffffff'"
          />
        }

        <!-- Edges layer (rendered first, behind nodes) -->
        <g class="edges-layer">
          @for (edge of edges; track edge.id) {
            <app-lineage-edge
              [edge]="edge"
              [sourceNode]="getNode(edge.fromDigest)"
              [targetNode]="getNode(edge.toDigest)"
            />
          }
        </g>

        <!-- Nodes layer -->
        <g class="nodes-layer">
          @for (node of nodes; track node.artifactDigest) {
            <app-lineage-node
              [node]="node"
              [selected]="isSelected(node)"
              [hovered]="isHovered(node)"
              (click)="onNodeClick(node, $event)"
              (mouseenter)="onNodeHover(node, $event)"
              (mouseleave)="onNodeLeave()"
            />
          }
        </g>
      </g>
    </svg>
  `
})
export class LineageGraphComponent {
  // viewBox(), lanes(), graphHeight(), node lookup and selection helpers
  // are part of the existing component and elided here.

  // Pan/zoom state
  readonly transform = signal({ x: 0, y: 0, scale: 1 });

  readonly transformAttr = computed(() => {
    const t = this.transform();
    return `translate(${t.x}, ${t.y}) scale(${t.scale})`;
  });

  // Pan handling
  private isDragging = false;
  private dragStart = { x: 0, y: 0 };

  onMouseDown(event: MouseEvent): void {
    if (event.button === 0) { // Left click
      this.isDragging = true;
      this.dragStart = { x: event.clientX, y: event.clientY };
    }
  }

  onMouseMove(event: MouseEvent): void {
    if (!this.isDragging) return;

    const dx = event.clientX - this.dragStart.x;
    const dy = event.clientY - this.dragStart.y;

    this.transform.update(t => ({
      ...t,
      x: t.x + dx,
      y: t.y + dy
    }));

    this.dragStart = { x: event.clientX, y: event.clientY };
  }

  onMouseUp(): void {
    this.isDragging = false;
  }

  // Zoom handling
  onWheel(event: WheelEvent): void {
    event.preventDefault();

    const scaleFactor = event.deltaY > 0 ? 0.9 : 1.1;
    const newScale = Math.min(3, Math.max(0.1, this.transform().scale * scaleFactor));

    this.transform.update(t => ({ ...t, scale: newScale }));
  }
}
```

### Bezier Curve Edges

```typescript
@Component({
  selector: 'app-lineage-edge',
  template: `
    <g class="edge" [class]="edgeClass">
      <path
        [attr.d]="pathData()"
        [attr.stroke]="strokeColor"
        stroke-width="2"
        fill="none"
      />
      @if (showArrow) {
        <polygon
          [attr.points]="arrowPoints()"
          [attr.fill]="strokeColor"
        />
      }
    </g>
  `
})
export class LineageEdgeComponent {
  @Input() edge!: LineageEdge;
  @Input() sourceNode!: LayoutNode;
  @Input() targetNode!: LayoutNode;
  @Input() showArrow = true;

  get edgeClass(): string {
    return `edge--${this.edge.relationship}`;
  }

  get strokeColor(): string {
    // One stroke per relationship type; palette is illustrative.
    switch (this.edge.relationship) {
      case 'parent': return '#6c757d';
      case 'build': return '#007bff';
      default: return '#17a2b8';
    }
  }

  // Plain method rather than computed(): the inputs are decorator @Inputs,
  // not signals, so computed() would not re-evaluate when they change.
  pathData(): string {
    const src = this.sourceNode;
    const tgt = this.targetNode;

    // Control point offset for curve
    const dx = tgt.x - src.x;
    const cpOffset = Math.min(Math.abs(dx) * 0.5, 100);

    return `M ${src.x} ${src.y}
            C ${src.x + cpOffset} ${src.y},
              ${tgt.x - cpOffset} ${tgt.y},
              ${tgt.x} ${tgt.y}`;
  }

  // Simple arrowhead at the target end, assuming left-to-right layout.
  arrowPoints(): string {
    const { x, y } = this.targetNode;
    return `${x},${y} ${x - 8},${y - 4} ${x - 8},${y + 4}`;
  }
}
```

### Canvas Minimap

For performance-critical rendering (many nodes), use Canvas:

```typescript
@Component({
  selector: 'app-lineage-minimap',
  template: `
    <canvas
      #canvas
      class="minimap-canvas"
      (click)="onCanvasClick($event)"
    ></canvas>
  `
})
export class LineageMinimapComponent implements AfterViewInit, OnChanges, OnDestroy {
  @ViewChild('canvas') canvasRef!: ElementRef<HTMLCanvasElement>;
  @Input() nodes: LayoutNode[] = [];
  @Input() viewportRect?: { x: number; y: number; width: number; height: number };

  private ctx!: CanvasRenderingContext2D;
  private resizeObserver!: ResizeObserver;

  ngAfterViewInit(): void {
    const canvas = this.canvasRef.nativeElement;
    this.ctx = canvas.getContext('2d')!;

    // Handle high DPI displays
    this.resizeObserver = new ResizeObserver(entries => {
      const { width, height } = entries[0].contentRect;
      canvas.width = width * window.devicePixelRatio;
      canvas.height = height * window.devicePixelRatio;
      canvas.style.width = `${width}px`;
      canvas.style.height = `${height}px`;
      this.ctx.scale(window.devicePixelRatio, window.devicePixelRatio);
      this.render();
    });

    this.resizeObserver.observe(canvas);
  }

  ngOnChanges(): void {
    if (this.ctx) {
      this.render();
    }
  }

  ngOnDestroy(): void {
    this.resizeObserver?.disconnect();
  }

  onCanvasClick(event: MouseEvent): void {
    // Map the click back to graph coordinates and re-center the main view
    // (wiring to the graph component elided here).
  }

  private render(): void {
    const canvas = this.canvasRef.nativeElement;
    const { width, height } = canvas.getBoundingClientRect();

    // Clear
    this.ctx.clearRect(0, 0, width, height);

    // Calculate scale to fit all nodes
    const bounds = this.calculateBounds();
    const scale = Math.min(
      width / bounds.width,
      height / bounds.height
    ) * 0.9;

    // Draw nodes
    for (const node of this.nodes) {
      const x = (node.x - bounds.minX) * scale + 5;
      const y = (node.y - bounds.minY) * scale + 5;

      this.ctx.fillStyle = this.getNodeColor(node);
      this.ctx.beginPath();
      this.ctx.arc(x, y, 3, 0, Math.PI * 2);
      this.ctx.fill();
    }

    // Draw viewport rectangle
    if (this.viewportRect) {
      this.ctx.strokeStyle = '#007bff';
      this.ctx.lineWidth = 2;
      this.ctx.strokeRect(
        (this.viewportRect.x - bounds.minX) * scale + 5,
        (this.viewportRect.y - bounds.minY) * scale + 5,
        this.viewportRect.width * scale,
        this.viewportRect.height * scale
      );
    }
  }

  // Bounding box over all node positions; guards against empty input.
  private calculateBounds(): { minX: number; minY: number; width: number; height: number } {
    if (this.nodes.length === 0) {
      return { minX: 0, minY: 0, width: 1, height: 1 };
    }
    const xs = this.nodes.map(n => n.x);
    const ys = this.nodes.map(n => n.y);
    const minX = Math.min(...xs);
    const minY = Math.min(...ys);
    return {
      minX,
      minY,
      width: Math.max(...xs) - minX || 1,
      height: Math.max(...ys) - minY || 1
    };
  }

  private getNodeColor(node: LayoutNode): string {
    // Single neutral color here; the real component colors by node status.
    return '#6c757d';
  }
}
```

---

## Styling System

### CSS Variables (Design Tokens)

```scss
// styles/variables.scss
:root {
  // Colors
  --color-primary: #007bff;
  --color-success: #28a745;
  --color-warning: #ffc107;
  --color-danger: #dc3545;
  --color-info: #17a2b8;

  // Light theme
  --bg-primary: #ffffff;
  --bg-secondary: #f8f9fa;
  --bg-tertiary: #e9ecef;
  --bg-hover: #f0f0f0;
  --text-primary: #212529;
  --text-secondary: #6c757d;
  --border-color: #dee2e6;

  // Spacing
  --spacing-xs: 4px;
  --spacing-sm: 8px;
  --spacing-md: 16px;
  --spacing-lg: 24px;
  --spacing-xl: 32px;

  // Typography
  --font-family-base: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
  --font-family-mono: 'SF Mono', Consolas, 'Liberation Mono', monospace;
  --font-size-xs: 11px;
  --font-size-sm: 13px;
  --font-size-md: 14px;
  --font-size-lg: 16px;

  // Shadows
  --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.05);
  --shadow-md: 0 4px 6px rgba(0, 0, 0, 0.1);
  --shadow-lg: 0 10px 15px rgba(0, 0, 0, 0.1);

  // Border radius
  --radius-sm: 4px;
  --radius-md: 6px;
  --radius-lg: 8px;
  --radius-full: 9999px;

  // Transitions
  --transition-fast: 150ms ease;
  --transition-normal: 200ms ease;
  --transition-slow: 300ms ease;
}

// Dark theme
.dark-mode {
  --bg-primary: #1a1a2e;
  --bg-secondary: #16213e;
  --bg-tertiary: #0f3460;
  --bg-hover: #2a2a4a;
  --text-primary: #e0e0e0;
  --text-secondary: #a0a0a0;
  --border-color: #3a3a5a;
}
```

### Component Styling Pattern

```scss
// component.component.scss
:host {
  display: block;
  width: 100%;
}

.container {
  background: var(--bg-primary);
  border: 1px solid var(--border-color);
  border-radius: var(--radius-lg);
  padding: var(--spacing-md);
}

.header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  margin-bottom: var(--spacing-md);

  .title {
    font-size: var(--font-size-lg);
    font-weight: 600;
    color: var(--text-primary);
  }
}

.badge {
  display: inline-flex;
  align-items: center;
  padding: 2px 8px;
  border-radius: var(--radius-full);
  font-size: var(--font-size-xs);
  font-weight: 500;

  &.success {
    background: rgba(40, 167, 69, 0.1);
    color: var(--color-success);
  }

  &.danger {
    background: rgba(220, 53, 69, 0.1);
    color: var(--color-danger);
  }
}

// Animations
.fade-in {
  animation: fadeIn var(--transition-normal);
}

@keyframes fadeIn {
  from { opacity: 0; transform: translateY(-10px); }
  to { opacity: 1; transform: translateY(0); }
}

// Responsive
@media (max-width: 768px) {
  .container {
    padding: var(--spacing-sm);
  }
}
```

---

## Testing Strategy

### Unit Test Structure

```typescript
// component.component.spec.ts
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { ExplainerTimelineComponent } from './explainer-timeline.component';
import { ExplainerService } from './explainer.service';

describe('ExplainerTimelineComponent', () => {
  let component: ExplainerTimelineComponent;
  let fixture: ComponentFixture<ExplainerTimelineComponent>;
  let mockService: jasmine.SpyObj<ExplainerService>;

  beforeEach(async () => {
    mockService = jasmine.createSpyObj('ExplainerService', ['getExplanation']);

    await TestBed.configureTestingModule({
      imports: [ExplainerTimelineComponent],
      providers: [
        { provide: ExplainerService, useValue: mockService }
      ]
    }).compileComponents();

    fixture = TestBed.createComponent(ExplainerTimelineComponent);
    component = fixture.componentInstance;
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });

  it('should display loading state', () => {
    component.loading = true;
    fixture.detectChanges();

    const loadingEl = fixture.nativeElement.querySelector('.loading-state');
    expect(loadingEl).toBeTruthy();
  });

  it('should render steps in order', () => {
    component.data = {
      steps: [
        { id: '1', sequence: 1, title: 'Step 1', status: 'success' },
        { id: '2', sequence: 2, title: 'Step 2', status: 'success' }
      ]
    };
    fixture.detectChanges();

    const steps = fixture.nativeElement.querySelectorAll('.step-card');
    expect(steps.length).toBe(2);
    expect(steps[0].textContent).toContain('Step 1');
  });

  it('should expand step on click', () => {
    component.data = {
      steps: [{ id: '1', sequence: 1, title: 'Step 1', children: [{ id: '1a' }] }]
    };
    fixture.detectChanges();

    const stepCard = fixture.nativeElement.querySelector('.step-card');
    stepCard.click();
    fixture.detectChanges();

    expect(component.isExpanded('1')).toBeTrue();
  });

  it('should emit copy event with correct format', () => {
    spyOn(component.copyClick, 'emit');

    component.copyToClipboard('markdown');

    expect(component.copyClick.emit).toHaveBeenCalledWith('markdown');
  });
});
```

### Service Test Pattern

```typescript
// service.service.spec.ts
import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { LineageGraphService } from './lineage-graph.service';

describe('LineageGraphService', () => {
  let service: LineageGraphService;
  let httpMock: HttpTestingController;

  beforeEach(() => {
    TestBed.configureTestingModule({
      imports: [HttpClientTestingModule],
      providers: [LineageGraphService]
    });

    service = TestBed.inject(LineageGraphService);
    httpMock = TestBed.inject(HttpTestingController);
  });

  afterEach(() => {
    httpMock.verify();
  });

  it('should fetch lineage graph', () => {
    const mockGraph = {
      nodes: [{ id: '1', artifactDigest: 'sha256:abc' }],
      edges: []
    };

    service.getLineage('sha256:abc', 'tenant-1').subscribe(graph => {
      expect(graph.nodes.length).toBe(1);
    });

    const req = httpMock.expectOne('/api/v1/lineage/sha256%3Aabc?tenantId=tenant-1');
    expect(req.request.method).toBe('GET');
    req.flush(mockGraph);
  });

  it('should cache results', () => {
    const mockGraph = { nodes: [], edges: [] };

    // First call
    service.getLineage('sha256:abc', 'tenant-1').subscribe();
    httpMock.expectOne('/api/v1/lineage/sha256%3Aabc?tenantId=tenant-1').flush(mockGraph);

    // Second call should use cache
    service.getLineage('sha256:abc', 'tenant-1').subscribe();
    httpMock.expectNone('/api/v1/lineage/sha256%3Aabc?tenantId=tenant-1');
  });
});
```

---

## Accessibility Requirements

### ARIA Guidelines

```typescript
// Accessible component example
@Component({
  template: `
    <div
      class="step-card"
      role="button"
      [attr.aria-expanded]="expanded"
      [attr.aria-controls]="'step-details-' + step.id"
      tabindex="0"
      (click)="toggle()"
      (keydown.enter)="toggle()"
      (keydown.space)="toggle(); $event.preventDefault()">

      <span class="step-title" [id]="'step-title-' + step.id">{{ step.title }}</span>
      <span class="sr-only">
        {{ expanded ? 'Collapse' : 'Expand' }} step details
      </span>
    </div>

    <div
      [id]="'step-details-' + step.id"
      [hidden]="!expanded"
      role="region"
      [attr.aria-labelledby]="'step-title-' + step.id">
      <!-- Expanded content -->
    </div>
  `,
  styles: [`
    .sr-only {
      position: absolute;
      width: 1px;
      height: 1px;
      padding: 0;
      margin: -1px;
      overflow: hidden;
      clip: rect(0, 0, 0, 0);
      border: 0;
    }

    [role="button"]:focus {
      outline: 2px solid var(--color-primary);
      outline-offset: 2px;
    }
  `]
})
```

### Keyboard Navigation

| Key | Action |
|-----|--------|
| Tab | Move focus to next interactive element |
| Shift+Tab | Move focus to previous element |
| Enter/Space | Activate focused button/link |
| Escape | Close modal/popover |
| Arrow keys | Navigate within lists/trees |
| Home/End | Jump to first/last item |

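A minimal sketch of the arrow-key handling behind the list/tree rows in the table above; the method name and item/focus plumbing are illustrative, not an existing component API:

```typescript
// Illustrative list-navigation handler; roving-tabindex details elided.
function onListKeydown(event: KeyboardEvent, items: HTMLElement[], index: number): void {
  let next = index;
  switch (event.key) {
    case 'ArrowDown': next = Math.min(index + 1, items.length - 1); break;
    case 'ArrowUp':   next = Math.max(index - 1, 0); break;
    case 'Home':      next = 0; break;
    case 'End':       next = items.length - 1; break;
    default: return; // let Tab, Enter, Escape behave as in the table above
  }
  event.preventDefault();
  items[next].focus();
}
```
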
---

## Sprint Task Reference

### FE_003: CGS Integration (3-5 days)
- Wire `lineage-graph.service` to new CGS APIs
- Add CGS hash display to `lineage-node.component`
- Wire `proof-tree.component` to verdict traces
- Add "Replay Verdict" button to hover card
- Display confidence factor chips

### FE_004: Proof Studio (5-7 days)
- Implement `ConfidenceBreakdownComponent`
- Implement `ConfidenceFactorChip`
- Implement `WhatIfSliderComponent`
- Wire proof-tree to CGS proof traces
- Add confidence breakdown to verdict card

### FE_005: Explainer Timeline (5-7 days)
- Create `ExplainerTimelineComponent`
- Create `ExplainerStepComponent`
- Design step data model
- Add step expansion with animation
- Wire to ProofTrace API
- Implement copy-to-clipboard

### FE_006: Node Diff Table (4-5 days)
- Create `DiffTableComponent`
- Implement column definitions
- Add row expansion template
- Implement filter chips
- Add sorting functionality
- Implement row selection

### FE_007: Pinned Explanations (2-3 days)
- Create `PinnedExplanationService`
- Create `PinnedPanelComponent`
- Add pin buttons to Explainer Timeline
- Add pin buttons to Diff Table rows
- Implement format templates (Markdown, JSON, HTML, Jira) - see the sketch after this list
- Add copy-to-clipboard with toast

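What a format template could look like; the `PinnedExplanation` shape and the template set are assumptions sketched from the task list above, not the real service surface:

```typescript
// Hypothetical format templates for PinnedExplanationService.
interface PinnedExplanation {
  title: string;
  verdict: string;
  replayHash: string;
}

type PinnedFormat = 'markdown' | 'json' | 'html' | 'jira';

function formatPinned(p: PinnedExplanation, format: PinnedFormat): string {
  switch (format) {
    case 'markdown':
      return `**${p.title}**\n\nVerdict: ${p.verdict}\nReplay hash: \`${p.replayHash}\``;
    case 'jira':
      return `*${p.title}*\nVerdict: ${p.verdict}\nReplay hash: {{${p.replayHash}}}`;
    case 'html':
      return `<strong>${p.title}</strong><br>Verdict: ${p.verdict}<br><code>${p.replayHash}</code>`;
    default: // 'json'
      return JSON.stringify(p, null, 2);
  }
}
```
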
### FE_008: Reachability Gate Diff (3-4 days)
- Enhance `ReachabilityDiffComponent`
- Create `GateChipComponent`
- Create `PathComparisonComponent`
- Create `ConfidenceBarComponent`
- Add gate expansion panel
- Add call graph mini-visualization

### FE_009: Audit Pack Export (2-3 days)
- Enhance `AuditPackExportComponent`
- Create `ExportOptionsComponent`
- Create `MerkleDisplayComponent`
- Add signing options
- Implement progress tracking
- Add download handling

---

## Appendix: Data Model Reference

See `src/app/features/lineage/models/lineage.models.ts` for complete type definitions, including:
- `LineageNode`
- `LineageEdge`
- `LineageGraph`
- `LineageDiffResponse`
- `ComponentDiff`
- `VexDelta`
- `ReachabilityDelta`
- `AttestationLink`
- `ViewOptions`
- `SelectionState`
- `HoverCardState`

@@ -0,0 +1,227 @@

# ADVISORY_20251229: SBOM Lineage Graph & Testing Infrastructure
|
||||
|
||||
## Advisory Classification
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| **Advisory ID** | ADVISORY_20251229_SBOM_LINEAGE_AND_TESTING |
|
||||
| **Date** | 2025-12-29 |
|
||||
| **Priority** | HIGH |
|
||||
| **Verdict** | **PROCEED** - High value, aligns with Stella Ops vision |
|
||||
| **Existing Coverage** | ~70% architecturally designed, ~20% implemented |
|
||||
|
||||
## Executive Summary
|
||||
|
||||
The advisory proposes:
|
||||
1. **SBOM Lineage Graph** - Git-like visualization with hover-to-proof UX
|
||||
2. **Testing Infrastructure** - Fixture harvesting, golden tests, determinism verification
|
||||
3. **Backport Detection Algorithm** - Fix rules model with distro-specific extractors
|
||||
4. **VEX Lattice Tests** - Truth table verification for merge correctness
|
||||
5. **Scheduler Resilience** - Chaos and load tests
|
||||
6. **E2E Replayable Verdict** - Full pipeline replay verification
|
||||
|
||||
**Verdict:** These proposals are **highly aligned** with Stella Ops' core differentiators:
|
||||
- **Determinism** (reproducible vulnerability assessments)
|
||||
- **Offline-first** (air-gapped operation)
|
||||
- **VEX-first decisioning** (lattice-based consensus)
|
||||
- **Explainability** (proof chains and evidence)
|
||||
|
||||
Most of the **architecture already exists** in documentation. The gap is **implementation and test coverage**.
|
||||
|
||||
---
|
||||
|
||||
## Gap Analysis Summary

| Feature | Architecture | Implementation | Tests | Recommendation |
|---------|--------------|----------------|-------|----------------|
| SBOM Lineage Graph | 100% | 20% | 0% | **Proceed with existing sprints** |
| Testing Infrastructure | 70% | 40% | N/A | **Create FixtureHarvester** |
| Backport Status Service | 50% | 30% | 10% | **Formalize algorithm** |
| VEX Lattice Truth Tables | 100% | 60% | 10% | **Add systematic tests** |
| Scheduler Resilience | 80% | 70% | 20% | **Add chaos tests** |
| E2E Replayable Verdict | 90% | 40% | 5% | **Wire components** |

---

## Existing Infrastructure (Already in Stella Ops)

### 1. SBOM Lineage Architecture (docs/modules/sbomservice/lineage/)

**Status:** FULLY DESIGNED, NOT IMPLEMENTED

- `IOciAncestryExtractor` - Extract base image refs from OCI config
- `ISbomLineageEdgeRepository` - Persist DAG edges (parent, build, base)
- `IVexDeltaRepository` - Track status transitions
- `ISbomVerdictLinkRepository` - Link SBOM versions to VEX consensus
- `ILineageGraphService` - Query and diff lineage
- Database schema for `sbom_lineage_edges`, `vex_deltas`, `sbom_verdict_links`
- API endpoints: `GET /lineage/{digest}`, `GET /lineage/diff`, `POST /lineage/export`

### 2. Testing Infrastructure (src/__Tests/)

**Status:** PARTIAL INFRASTRUCTURE EXISTS

- `StellaOps.Testing.Determinism/` with `DeterminismVerifier`
- `StellaOps.Testing.AirGap/` with `NetworkIsolatedTestBase`
- `__Benchmarks/golden-corpus/` for canonical test cases
- `__Datasets/` for ground truth samples
- Standardized test categories (Unit, Integration, Determinism, AirGap, Chaos)

**Gap:** No `FixtureHarvester` tool, no per-fixture `meta.json` manifests

### 3. Feedser Evidence Collection (src/Feedser/)

**Status:** LIBRARY EXISTS

- `HunkSigExtractor` for patch signature extraction
- `BinaryFingerprintFactory` with TLSH and instruction hash fingerprinters
- Four-tier evidence model (Tier 1-4 confidence levels)
- Consumed by Concelier `ProofService`

### 4. VexLens Consensus (src/VexLens/)

**Status:** CORE ENGINE EXISTS

- Lattice states: `unknown < under_investigation < not_affected | affected < fixed`
- `VexConsensusEngine` for merge computation
- `OpenVexNormalizer` and `CsafVexNormalizer`
- Conflict tracking with detailed arrays
- Trust tier provenance from Excititor connectors

**Gap:** No systematic truth table tests

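To make the truth-table gap concrete, a minimal sketch of what such a test could look like. The `VexConsensusEngine.Merge` signature and the `VexStatus` enum names are assumptions based on the component list above, not the real VexLens surface, and the real merge also weighs trust tiers:

```csharp
// Hypothetical xUnit truth-table test over the lattice order
// unknown < under_investigation < not_affected | affected < fixed.
public class VexLatticeTruthTableTests
{
    [Theory]
    [InlineData(VexStatus.Unknown,            VexStatus.Affected,    VexStatus.Affected)]
    [InlineData(VexStatus.UnderInvestigation, VexStatus.NotAffected, VexStatus.NotAffected)]
    [InlineData(VexStatus.Affected,           VexStatus.Fixed,       VexStatus.Fixed)]
    [InlineData(VexStatus.Unknown,            VexStatus.Unknown,     VexStatus.Unknown)]
    public void Merge_FollowsLatticeOrder(VexStatus a, VexStatus b, VexStatus expected)
    {
        var engine = new VexConsensusEngine();
        Assert.Equal(expected, engine.Merge(a, b));
    }
}
```
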
### 5. Replay Infrastructure (src/Replay/)

**Status:** MODELS AND SERVICE DESIGNED

- `ReplayManifest` v1/v2 schema
- `ReplayToken` generation and verification
- `PolicySimulationInputLock` for pinning
- Scanner `RecordModeService` for bundle capture

**Gap:** No `VerdictBuilder` orchestration service (Sprint CGS-001)

### 6. Concelier Advisory Ingestion (src/Concelier/)

**Status:** PRODUCTION READY

- Link-Not-Merge architecture
- Multiple connectors: CSAF (Red Hat, SUSE, Ubuntu, Oracle, Microsoft), OSV, GHSA
- Version range normalization (EVR, dpkg, apk, semver)
- Conflict detection in linksets

---

## Recommended Sprint Batch

Based on the gap analysis, the following sprints have been created:

### Batch 001 (Already Exists)

| Sprint | Topic | Status |
|--------|-------|--------|
| `SPRINT_20251229_001_001_BE_cgs_infrastructure` | Verdict Builder (CGS) | TODO |
| `SPRINT_20251229_001_002_BE_vex_delta` | VEX Delta Persistence | TODO |
| `SPRINT_20251229_001_003_FE_lineage_graph` | Lineage Visualization | TODO |

### Batch 004 (New - From This Advisory)

| Sprint | Topic | Tasks |
|--------|-------|-------|
| `SPRINT_20251229_004_001_LIB_fixture_harvester` | FixtureHarvester Tool | 10 tasks |
| `SPRINT_20251229_004_002_BE_backport_status_service` | Backport Status Retrieval | 11 tasks |
| `SPRINT_20251229_004_003_BE_vexlens_truth_tables` | VexLens Truth Table Tests | 9 tasks |
| `SPRINT_20251229_004_004_BE_scheduler_resilience` | Scheduler Chaos Tests | 8 tasks |
| `SPRINT_20251229_004_005_E2E_replayable_verdict` | E2E Replay Tests | 8 tasks |

---

## Priority Ranking
|
||||
|
||||
### P0 - Critical Path (Blocks Other Work)
|
||||
|
||||
1. **Batch 001** - CGS infrastructure and VEX delta persistence
|
||||
- Required for lineage graph and replay features
|
||||
- Existing sprints, well-defined tasks
|
||||
|
||||
2. **SPRINT_20251229_004_003_BE_vexlens_truth_tables**
|
||||
- VexLens is core to the platform; truth tables validate correctness
|
||||
- Low effort, high confidence gain
|
||||
|
||||
### P1 - High Value
|
||||
|
||||
3. **SPRINT_20251229_004_005_E2E_replayable_verdict**
|
||||
- E2E tests catch integration issues early
|
||||
- Validates the core "deterministic reproducibility" claim
|
||||
|
||||
4. **SPRINT_20251229_004_001_LIB_fixture_harvester**
|
||||
- Enables systematic fixture management
|
||||
- Supports all test categories
|
||||
|
||||
### P2 - Important
|
||||
|
||||
5. **SPRINT_20251229_004_002_BE_backport_status_service**
|
||||
- Reduces false positives for distro packages
|
||||
- Requires distro-specific extractors (effort)
|
||||
|
||||
6. **SPRINT_20251229_004_004_BE_scheduler_resilience**
|
||||
- Chaos tests for production readiness
|
||||
- Can be parallelized with other work
|
||||
|
||||
---
|
||||
|
||||
## Alignment with Stella Ops Vision
|
||||
|
||||
| Advisory Proposal | Stella Ops Principle | Alignment |
|
||||
|-------------------|---------------------|-----------|
|
||||
| SBOM Lineage Graph | Explainability | HIGH - "proof into explorable UX" |
|
||||
| Hover-to-proof | Evidence-first | HIGH - every claim has evidence |
|
||||
| Golden fixtures | Determinism | HIGH - byte-identical outputs |
|
||||
| Replay bundles | Offline-first | HIGH - air-gap verification |
|
||||
| Backport detection | Distro-aware | HIGH - reduces false positives |
|
||||
| Lattice truth tables | VEX-first decisioning | HIGH - validates core algorithm |
|
||||
| Chaos tests | Production readiness | MEDIUM - operational quality |
|
||||
|
||||
---
|
||||
|
||||
## What NOT to Implement
|
||||
|
||||
The advisory proposes some elements that **already exist** or are **out of scope**:
|
||||
|
||||
1. **Determinism harness** - Already exists as `StellaOps.Testing.Determinism/`
|
||||
2. **Canonical JSON** - Already implemented across the codebase
|
||||
3. **Feed parsers** - Concelier connectors already parse NVD/GHSA/OSV
|
||||
4. **Merge algorithm** - VexLens already implements the lattice
|
||||
|
||||
---
|
||||
|
||||
## Success Metrics
|
||||
|
||||
After implementing the recommended sprints:
|
||||
|
||||
| Metric | Target |
|
||||
|--------|--------|
|
||||
| VexLens truth table coverage | 100% of merge scenarios |
|
||||
| SBOM lineage API availability | Production |
|
||||
| E2E replay verification | Pass on 3 platforms (Ubuntu, Alpine, Debian) |
|
||||
| Scheduler chaos test coverage | Crash recovery, backpressure, idempotency |
|
||||
| Fixture manifest coverage | All test fixtures have `meta.json` |
|
||||
| Backport detection accuracy | >90% on Debian/Alpine packages |
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- `docs/modules/sbomservice/lineage/architecture.md`
|
||||
- `docs/modules/vex-lens/architecture.md`
|
||||
- `docs/modules/feedser/architecture.md`
|
||||
- `docs/modules/replay/architecture.md`
|
||||
- `src/__Tests/AGENTS.md`
|
||||
|
||||
## Created Sprints
|
||||
|
||||
- `docs/implplan/SPRINT_20251229_004_001_LIB_fixture_harvester.md`
|
||||
- `docs/implplan/SPRINT_20251229_004_002_BE_backport_status_service.md`
|
||||
- `docs/implplan/SPRINT_20251229_004_003_BE_vexlens_truth_tables.md`
|
||||
- `docs/implplan/SPRINT_20251229_004_004_BE_scheduler_resilience.md`
|
||||
- `docs/implplan/SPRINT_20251229_004_005_E2E_replayable_verdict.md`
|
||||
@@ -0,0 +1,133 @@
# Advisory Analysis: Deterministic Verdicts (CGS) & SBOM Lineage Graph

**Advisory Date:** 2025-12-29
**Status:** ANALYZED - Superseded by Existing Consolidations
**Strategic Value:** HIGH
**Implementation Effort:** MEDIUM (gaps only)

---

## Executive Summary

This advisory proposes:

1. **SBOM Lineage Graph** - Git-like visualization with hover-to-proof micro-interactions
2. **Canonical Graph Signature (CGS)** - Deterministic, replayable verdicts
3. **Proof Studio UX** - Explainable confidence scoring

**Verdict:** The advisory validates StellaOps' existing architecture direction. **~90% is already implemented.** The remaining work is minor integration, not invention.

**Revision Note (2025-12-29):** Deeper exploration revealed the frontend is more complete than initially assessed:

- 41 TypeScript files in the lineage feature
- 31 visualization components already exist
- Proof tree, hover cards, compare mode, and diff views are all implemented
- Frontend sprints revised down to minor integration tasks

---

## Prior Art (Already Consolidated)

| Advisory Concept | Existing Document | Status |
|-----------------|-------------------|--------|
| SBOM Lineage Graph | `ADVISORY_SBOM_LINEAGE_GRAPH.md` | 70% backend |
| Deterministic Verdicts | `CONSOLIDATED - Deterministic Evidence and Verdict Architecture.md` | 85% complete |
| Diff-Aware Gates | `CONSOLIDATED - Diff-Aware Release Gates and Risk Budgets.md` | 75% complete |

---

## What's Already Implemented

### ✅ Complete

| Component | Location |
|-----------|----------|
| Canonical JSON (RFC 8785 JCS) | `StellaOps.Canonical.Json` |
| NFC String Normalization | `StellaOps.Resolver.NfcStringNormalizer` |
| Content-Addressed IDs | `Attestor.ProofChain/Identifiers/` |
| DSSE Signing | `Signer/`, `Attestor/` |
| Merkle Trees | `ProofChain/Merkle/DeterministicMerkleTreeBuilder` |
| Determinism Guards | `Policy.Engine/DeterminismGuard/` |
| Replay Manifest | `StellaOps.Replay.Core` |
| Evidence Sealing | `EvidenceLocker.Core` |
| VEX Trust Lattice | `VexLens/OpenVexStatementMerger` |
| Delta Verdicts | `Policy/Deltas/DeltaVerdict.cs` |
| Rekor Verification | `Attestor.Core/Verification/` |
| SBOM Ledger with Lineage | `SbomService/SbomLedgerService` |

### 🔄 Gaps Identified

| Gap | Sprint |
|-----|--------|
| Unified VerdictBuilder service | SPRINT_20251229_001_001_BE |
| `POST /verdicts/build` API | SPRINT_20251229_001_001_BE |
| Fulcio keyless signing wiring | SPRINT_20251229_001_001_BE |
| `policy.lock.json` generator | SPRINT_20251229_001_001_BE |
| VEX delta table migration | SPRINT_20251229_001_002_BE |
| SBOM-verdict link table | SPRINT_20251229_001_002_BE |
| VexLens PostgreSQL backend | SPRINT_20251229_001_002_BE |
| Lineage Graph UI component | SPRINT_20251229_001_003_FE |
| Hover card micro-interactions | SPRINT_20251229_001_003_FE |
| Proof Studio UI | SPRINT_20251229_001_004_FE |
| What-if confidence slider | SPRINT_20251229_001_004_FE |

---

## Created Sprints

1. `SPRINT_20251229_001_001_BE_cgs_infrastructure.md` - VerdictBuilder, APIs, Fulcio
2. `SPRINT_20251229_001_002_BE_vex_delta.md` - Database migrations
3. `SPRINT_20251229_001_003_FE_lineage_graph.md` - Graph visualization
4. `SPRINT_20251229_001_004_FE_proof_studio.md` - Explainability UX

---

## Recommendation

**Archive this advisory** as a validation of architecture direction. Reference the existing consolidated documents for implementation detail, and execute the gap-focused sprints above.

---

## Original Advisory Content

The original advisory proposed:

### Canonical Graph Signature (CGS)

> Turn all inputs into a graph (nodes: packages, files, build steps, attestations; edges: depends-on, produced-by), serialize canonically, then hash. **Rule:** `same inputs (bytes + rule set + policy versions) → same CGS → same verdict`.

**StellaOps Status:** Implemented via `ProofChain/Merkle/DeterministicMerkleTreeBuilder` + content-addressed IDs.
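
To make the rule concrete, here is a minimal sketch of "serialize canonically, then hash" for a toy node/edge graph. The types and the helper are illustrative only (they are not the `DeterministicMerkleTreeBuilder` API); the point is that sorting every collection before serialization makes the signature independent of input order.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text.Json;

// Hypothetical minimal graph types; the real model lives in Attestor.ProofChain.
public sealed record GraphNode(string Id, string Kind);
public sealed record GraphEdge(string From, string To, string Relation);

public static class CgsSketch
{
    // Same nodes + edges, in any input order -> same canonical bytes -> same hash.
    public static string ComputeSignature(
        IReadOnlyCollection<GraphNode> nodes,
        IReadOnlyCollection<GraphEdge> edges)
    {
        var canonical = new
        {
            nodes = nodes.OrderBy(n => n.Id, StringComparer.Ordinal).ToArray(),
            edges = edges.OrderBy(e => e.From, StringComparer.Ordinal)
                         .ThenBy(e => e.To, StringComparer.Ordinal)
                         .ThenBy(e => e.Relation, StringComparer.Ordinal)
                         .ToArray()
        };

        // Property order of the anonymous type is fixed at compile time; the
        // production path uses RFC 8785 JCS for full key-order canonicalization.
        var bytes = JsonSerializer.SerializeToUtf8Bytes(canonical);
        return Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
    }
}
```

Shuffling the input collections and calling `ComputeSignature` again returns the same hex string, which is exactly the `same inputs → same CGS` property the advisory asks for.
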
### Canonicalization Rules

> - Sort all collections (lexicographic, locale-independent)
> - Normalize IDs (PURL casing, semver normalization)
> - Stable timestamps: truncated ISO8601Z or logical time
> - No environmental entropy

**StellaOps Status:** Implemented via `Rfc8785JsonCanonicalizer`, `NfcStringNormalizer`, Policy determinism guards.
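
As a concrete illustration of the ID and timestamp rules, a small hypothetical normalization pass is sketched below; the shipped implementations are `Rfc8785JsonCanonicalizer` and `NfcStringNormalizer`, and this helper only mirrors their intent.

```csharp
using System;
using System.Globalization;

public static class CanonicalizationSketch
{
    // The purl type segment is case-insensitive per the purl spec, so it is
    // lower-cased: "pkg:NPM/left-pad@1.3.0" -> "pkg:npm/left-pad@1.3.0".
    public static string NormalizePurlType(string purl)
    {
        const string prefix = "pkg:";
        if (!purl.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)) return purl;
        var slash = purl.IndexOf('/', prefix.Length);
        if (slash < 0) return purl;
        return prefix + purl[prefix.Length..slash].ToLowerInvariant() + purl[slash..];
    }

    // "Truncated ISO8601Z": whole seconds, always UTC, so clock jitter and
    // time zones cannot leak entropy into the canonical form.
    public static string StableTimestamp(DateTimeOffset ts) =>
        ts.UtcDateTime.ToString("yyyy-MM-dd'T'HH:mm:ss'Z'", CultureInfo.InvariantCulture);
}
```
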
### API Surface

> - `POST /verdicts/build`
> - `GET /verdicts/{cgs_hash}`
> - `POST /verdicts/diff`

**StellaOps Status:** Gap - needs VerdictBuilder service composition.
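
To show roughly what the missing composition amounts to, here is a sketch of the endpoint surface in the same minimal-API style used by the scanner web service. `IVerdictBuilder`, `BuildVerdictRequest`, and `VerdictResponse` are placeholder names, not existing types; the diff endpoint is omitted.

```csharp
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;

// Placeholder contracts; SPRINT_20251229_001_001_BE defines the real ones.
public sealed record BuildVerdictRequest(string SbomDigest, string PolicyVersion);
public sealed record VerdictResponse(string CgsHash, string Decision);

public interface IVerdictBuilder
{
    Task<VerdictResponse> BuildAsync(BuildVerdictRequest request, CancellationToken ct);
    Task<VerdictResponse?> GetAsync(string cgsHash, CancellationToken ct);
}

internal static class VerdictsEndpointsSketch
{
    public static void MapVerdictsEndpoints(this RouteGroupBuilder apiGroup)
    {
        var verdicts = apiGroup.MapGroup("/verdicts");

        // Deterministic build: identical request inputs yield an identical CGS hash.
        verdicts.MapPost("/build",
            async (BuildVerdictRequest request, IVerdictBuilder builder, CancellationToken ct) =>
                Results.Ok(await builder.BuildAsync(request, ct)));

        // Content-addressed lookup: the CGS hash is the identifier.
        verdicts.MapGet("/{cgsHash}",
            async (string cgsHash, IVerdictBuilder builder, CancellationToken ct) =>
                await builder.GetAsync(cgsHash, ct) is { } verdict
                    ? Results.Ok(verdict)
                    : Results.NotFound());
    }
}
```
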
### Rollout Phases

> 1. Canonicalize & Hash ✅
> 2. CGS & Deterministic Engine ✅
> 3. Signed Verdicts (OCI-attach) 🔄
> 4. Diff & Time-travel 🔄
> 5. Confidence & Proof Studio ❌

**StellaOps Status:** Phases 1-2 complete, 3-4 partial, 5 needs frontend.

---

## References

- `docs/product-advisories/archived/CONSOLIDATED - Deterministic Evidence and Verdict Architecture.md`
- `docs/product-advisories/archived/CONSOLIDATED - Diff-Aware Release Gates and Risk Budgets.md`
- `docs/product-advisories/archived/ADVISORY_SBOM_LINEAGE_GRAPH.md`
- `docs/modules/attestor/architecture.md` (ProofChain section)
- `docs/modules/policy/architecture.md` (Determinism section)
- `docs/modules/sbomservice/lineage/architecture.md`
- `docs/modules/replay/architecture.md`
@@ -0,0 +1,247 @@
# Gap Analysis: SBOM Lineage Graph & Cross-Distro Vulnerability Intelligence

> **Analysis Date:** 2025-12-29
> **Advisory Source:** Product advisory proposing SBOM Lineage visualization and cross-distro CSAF/VEX unification
> **Conclusion:** The advisory significantly underestimates the existing implementation; ~85% is already complete. Proceed with targeted sprints.

---

## Executive Summary

The product advisory proposed two major features:

1. **SBOM Lineage Graph** - Git-like visualization of container image ancestry with hover-to-proof micro-interactions
2. **Cross-Distro Vulnerability Intelligence** - Unified CSAF/VEX ingestion across Linux distributions

**Key Finding:** Both features are substantially implemented; the advisory dramatically underestimates existing capability.

| Feature Area | Advisory Implied | Actual Status | Gap |
|--------------|------------------|---------------|-----|
| Lineage Architecture | New design needed | 100% documented | None |
| Lineage UI Components | Build from scratch | ~80% complete (41 files) | API wiring |
| Version Comparators | Need all new | 100% complete | None |
| Distro Connectors | Need 5+ connectors | 5/6 complete | Astra only |
| Patch Fingerprinting | New capability | 100% complete | None |
| Trust Lattice | New framework | 100% complete | None |
| Proposed UAS Schema | Adopt schema | **SKIP** | Existing model superior |

**Recommendation:** Execute 3 targeted sprints (~34 tasks) instead of the ~50+ implied by the advisory.

---

## Detailed Gap Analysis

### 1. SBOM Lineage Graph

#### Architecture (docs/modules/sbomservice/lineage/architecture.md)

| Component | Status | Evidence |
|-----------|--------|----------|
| DAG data model | ✅ Complete | `LineageNode`, `LineageEdge` records defined |
| Edge types (parent/build/base) | ✅ Complete | `LineageRelationship` enum with 3 types |
| Node badges (vulns/signature) | ✅ Complete | Badge structure in architecture |
| Replay hash integration | ✅ Complete | `replayHash` field on nodes |
| API contracts | ✅ Documented | 3 endpoints fully specified |
| Database schema | ✅ Designed | 3 tables with indexes |
| Caching strategy | ✅ Designed | Valkey keys with TTLs |
| Determinism rules | ✅ Specified | Ordering rules documented |

**Gap:** API endpoints not implemented; database tables not migrated.
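
For orientation, a minimal sketch of what the documented DAG records might look like. Only `LineageNode`, `LineageEdge`, `LineageRelationship`, the three edge types, and the `replayHash` field are taken from the table above; every other field name is an assumption, and the authoritative shapes live in the architecture doc.

```csharp
using System;

// Assumed shapes for illustration only; see
// docs/modules/sbomservice/lineage/architecture.md for the real contracts.
public enum LineageRelationship
{
    Parent, // earlier build of the same image
    Build,  // produced-by relationship from a build step
    Base    // base-image ancestry
}

public sealed record LineageNode(
    Guid NodeId,
    string ImageDigest,
    string ReplayHash,       // ties the node to its replayable verdict
    int VulnerabilityCount,  // drives the vulnerability badge
    bool SignatureVerified); // drives the signature badge

public sealed record LineageEdge(
    Guid FromNodeId,
    Guid ToNodeId,
    LineageRelationship Relationship);
```
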
#### UI Components (src/Web/StellaOps.Web/src/app/features/lineage/)

| Component | Files | Status |
|-----------|-------|--------|
| Main graph visualization | `lineage-graph.component.ts` | ✅ 1000+ LOC |
| Hover cards | `lineage-hover-card.component.ts` | ✅ Complete |
| SBOM diff display | `lineage-sbom-diff.component.ts` | ✅ Complete |
| VEX diff display | `lineage-vex-diff.component.ts` | ✅ Complete |
| Compare panel | `lineage-compare-panel.component.ts` | ✅ Complete |
| Services | `lineage.service.ts` | ⚠️ Stubs only |

**Gap:** Services use mock data and need API wiring.

### 2. Cross-Distro Vulnerability Intelligence

#### Advisory Connectors (src/Concelier/__Connectors/)

| Distro | Connector | Version Comparator | Status |
|--------|-----------|-------------------|--------|
| Red Hat | `StellaOps.Concelier.Connector.RedHat` | rpm NEVRA | ✅ Complete |
| SUSE | `StellaOps.Concelier.Connector.Suse` | rpm NEVRA | ✅ Complete |
| Ubuntu | `StellaOps.Concelier.Connector.Ubuntu` | dpkg EVR | ✅ Complete |
| Debian | `StellaOps.Concelier.Connector.Debian` | dpkg EVR | ✅ Complete |
| Alpine | `StellaOps.Concelier.Connector.Alpine` | apk `-r<pkgrel>` | ✅ Complete |
| **Astra Linux** | None | dpkg EVR (inherit) | ❌ **Gap** |

#### Version Comparators (src/__Libraries/StellaOps.VersionComparison/)

| Comparator | Location | Status |
|------------|----------|--------|
| `RpmVersionComparer` | `Comparers/RpmVersionComparer.cs` | ✅ Complete |
| `DebianVersionComparer` | `Comparers/DebianVersionComparer.cs` | ✅ Complete |
| `ApkVersionComparer` | `src/Concelier/__Libraries/.../ApkVersionComparer.cs` | ✅ Complete |
| `SemVerComparer` | `Comparers/SemVerComparer.cs` | ✅ Complete |

**Gap:** None. All version comparators are implemented.
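
Since the Astra connector will inherit dpkg EVR (see AD-005 below), it is worth recalling what that format is. The sketch below shows only the `epoch:version-revision` split and the comparison order; the shipped `DebianVersionComparer` also implements dpkg's full character-ordering rules (digit runs compared numerically, `~` sorting before everything else), which are reduced here to a plain ordinal stand-in.

```csharp
using System;

public static class EvrSketch
{
    // Split "1:2.36.1-8+deb11u1" into epoch=1, version="2.36.1", revision="8+deb11u1".
    public static (int Epoch, string Version, string Revision) SplitEvr(string evr)
    {
        var epoch = 0;
        var colon = evr.IndexOf(':');
        if (colon >= 0)
        {
            epoch = int.Parse(evr[..colon]);
            evr = evr[(colon + 1)..];
        }

        // The revision is everything after the *last* hyphen; upstream versions
        // may themselves contain hyphens.
        var dash = evr.LastIndexOf('-');
        return dash >= 0
            ? (epoch, evr[..dash], evr[(dash + 1)..])
            : (epoch, evr, string.Empty);
    }

    // Comparison order: epoch first, then upstream version, then revision.
    public static int Compare(string left, string right)
    {
        var (le, lv, lr) = SplitEvr(left);
        var (re, rv, rr) = SplitEvr(right);
        if (le != re) return le.CompareTo(re);
        var v = ComparePart(lv, rv);
        return v != 0 ? v : ComparePart(lr, rr);
    }

    // Stand-in for dpkg's interleaved digit/non-digit ordering.
    private static int ComparePart(string a, string b) => string.CompareOrdinal(a, b);
}
```
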
#### Patch Fingerprinting (Feedser)

| Component | Location | Status |
|-----------|----------|--------|
| HunkSig extractor | `src/Feedser/StellaOps.Feedser.Core/HunkSigExtractor.cs` | ✅ Complete |
| Binary fingerprinting | `src/Feedser/StellaOps.Feedser.BinaryAnalysis/` | ✅ Complete |
| TLSH fuzzy hashing | `Fingerprinters/SimplifiedTlshFingerprinter.cs` | ✅ Complete |
| Instruction hash | `Fingerprinters/InstructionHashFingerprinter.cs` | ✅ Complete |

**Gap:** None. The four-tier evidence system is fully implemented.

#### Trust Lattice (VexLens)

| Component | Status | Evidence |
|-----------|--------|----------|
| 3-component trust vector | ✅ Complete | Provenance/Coverage/Replayability |
| Lattice join semantics | ✅ Complete | `unknown < under_investigation < ...` |
| Weighted scoring | ✅ Complete | Configurable weights in consensus |
| Issuer trust profiles | ✅ Complete | IssuerDirectory integration |

**Gap:** None. The trust framework is fully implemented.
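
The join semantics reduce to "keep the most-resolved statement seen so far". A toy rendering follows; note that the evidence column above only pins down the bottom of the order (`unknown < under_investigation < ...`), so the remaining ranks here are assumptions, and the real `OpenVexStatementMerger` additionally weighs issuer trust rather than using rank alone.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Toy ordering for illustration; only the first two ranks come from the doc.
public enum VexStatusRank
{
    Unknown = 0,
    UnderInvestigation = 1,
    Affected = 2,      // assumed rank
    NotAffected = 3,   // assumed rank
    Fixed = 4          // assumed rank
}

public static class LatticeSketch
{
    // Join = least upper bound; with a total order that is simply the maximum.
    public static VexStatusRank Join(VexStatusRank a, VexStatusRank b) =>
        (VexStatusRank)Math.Max((int)a, (int)b);

    // Folding the join is associative and order-independent, which is what
    // makes the merged result deterministic regardless of statement order.
    public static VexStatusRank JoinAll(IEnumerable<VexStatusRank> statuses) =>
        statuses.Aggregate(VexStatusRank.Unknown, Join);
}
```
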
### 3. Proposed UAS Schema - **RECOMMENDATION: SKIP**

The advisory proposed a "Unified Advisory Schema" (UAS). Analysis shows this should be **skipped**:

| Aspect | Proposed UAS | Existing Model | Decision |
|--------|--------------|----------------|----------|
| Conflict handling | Silent merge | Link-Not-Merge (preserves conflicts) | **Existing superior** |
| Trust modeling | Single score | 3-component vector | **Existing superior** |
| Evidence provenance | Lost in merge | AdvisoryLinkset preserves | **Existing superior** |
| AOC compliance | Unknown | Append-Only Contract enforced | **Existing superior** |

The existing `AdvisoryObservation` + `AdvisoryLinkset` model with Link-Not-Merge semantics is architecturally superior. Adopting UAS would be a significant regression.

---

## Sprint Execution Plan

### Sprint Dependency Graph

```
┌─────────────────────────────────────────────────────────────┐
│                                                             │
│  SPRINT_20251229_005_001_BE_sbom_lineage_api                │
│  (13 tasks)                                                 │
│  - Database migrations                                      │
│  - Repository implementations                               │
│  - API endpoints                                            │
│  - Caching layer                                            │
│                                                             │
└──────────────────────────┬──────────────────────────────────┘
                           │
                           ▼
┌─────────────────────────────────────────────────────────────┐
│                                                             │
│  SPRINT_20251229_005_003_FE_lineage_ui_wiring               │
│  (9 tasks)                                                  │
│  - Service API calls                                        │
│  - Component data binding                                   │
│  - Error/loading states                                     │
│  - E2E tests                                                │
│                                                             │
└─────────────────────────────────────────────────────────────┘

┌─────────────────────────────────────────────────────────────┐
│                                                             │
│  SPRINT_20251229_005_002_CONCEL_astra_connector             │
│  (12 tasks) - INDEPENDENT                                   │
│  - Research advisory format                                 │
│  - Connector implementation                                 │
│  - Version matcher (dpkg EVR)                               │
│  - Integration tests                                        │
│                                                             │
└─────────────────────────────────────────────────────────────┘
```

### Task Summary

| Sprint | Module | Tasks | Effort Estimate |
|--------|--------|-------|-----------------|
| 005_001 | BE (SbomService) | 13 | Medium |
| 005_002 | CONCEL (Concelier) | 12 | Medium-High (research required) |
| 005_003 | FE (Web) | 9 | Low-Medium |
| **Total** | | **34** | |

### Critical Path

1. **BE API (005_001)** must complete before **FE Wiring (005_003)**
2. **Astra Connector (005_002)** is independent and can run in parallel
3. No blocking dependencies on the existing CGS infrastructure sprint (001_001)

---

## Architecture Decisions

### Confirmed Decisions (No Change Needed)

| ID | Decision | Rationale |
|----|----------|-----------|
| AD-001 | Link-Not-Merge for advisories | Preserves conflict evidence |
| AD-002 | 3-component trust vector | Superior to single score |
| AD-003 | Deterministic JSON serialization | Enables replay verification |
| AD-004 | Valkey for hover cache | Matches existing infrastructure |
| AD-005 | dpkg EVR for Astra | Astra is Debian-based |

### Pending Decisions

| ID | Decision | Owner | Deadline |
|----|----------|-------|----------|
| PD-001 | Astra advisory feed format | Research in Sprint 005_002 | Before ASTRA-002 |
| PD-002 | Evidence pack size limit | Product | Before LIN-010 |
| PD-003 | Astra air-gap bundle strategy | Operations | Before ASTRA-007 |

---

## Risk Assessment

| Risk | Probability | Impact | Mitigation |
|------|------------|--------|------------|
| Astra feed unavailable or undocumented | Medium | High | Contact Astra directly; fall back to manual advisory import |
| UI components need significant refactoring | Low | Medium | Components are well-structured; only the service layer changes |
| Backend API performance under load | Low | Medium | Caching strategy designed; load test before production |
| Database migration conflicts | Low | Low | Migrations are additive only |

---

## Appendix: Evidence Locations

### Documentation
- `docs/modules/sbomservice/lineage/architecture.md` - Lineage architecture
- `docs/modules/concelier/architecture.md` - Advisory ingestion
- `docs/modules/feedser/architecture.md` - Patch fingerprinting
- `docs/modules/vex-lens/architecture.md` - Trust lattice

### Code
- `src/Web/StellaOps.Web/src/app/features/lineage/` - UI components (41 files)
- `src/Concelier/__Connectors/` - Advisory connectors (5 implemented)
- `src/__Libraries/StellaOps.VersionComparison/` - Version comparators
- `src/Feedser/` - Patch signature extraction

### Sprints Created
- `docs/implplan/SPRINT_20251229_005_001_BE_sbom_lineage_api.md`
- `docs/implplan/SPRINT_20251229_005_002_CONCEL_astra_connector.md`
- `docs/implplan/SPRINT_20251229_005_003_FE_lineage_ui_wiring.md`

---

## Conclusion

The product advisory is **valuable for prioritization** but significantly underestimates existing implementation maturity. The StellaOps codebase already contains:

- Complete architecture documentation for SBOM Lineage
- An ~80% complete UI implementation
- 5 of 6 distro connectors fully implemented
- All version comparators implemented
- Complete patch fingerprinting and trust frameworks

**Recommended Action:** Execute the 3 targeted sprints totaling 34 tasks. Skip the proposed UAS schema in favor of the existing, superior model. The only significant new development is the Astra Linux connector.
@@ -0,0 +1,738 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Auth.Abstractions;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;

namespace StellaOps.Scanner.WebService.Endpoints;

/// <summary>
/// Endpoints for managing SBOM sources (Zastava, Docker, CLI, Git).
/// </summary>
internal static class SourcesEndpoints
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        Converters = { new JsonStringEnumConverter() },
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static void MapSourcesEndpoints(this RouteGroupBuilder apiGroup, string sourcesSegment = "/sources")
    {
        ArgumentNullException.ThrowIfNull(apiGroup);

        var sources = apiGroup.MapGroup(sourcesSegment);

        // List sources
        sources.MapGet("/", HandleListAsync)
            .WithName("scanner.sources.list")
            .Produces<PagedResponse<SourceResponse>>(StatusCodes.Status200OK)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Get source by ID
        sources.MapGet("/{sourceId:guid}", HandleGetAsync)
            .WithName("scanner.sources.get")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Get source by name
        sources.MapGet("/by-name/{name}", HandleGetByNameAsync)
            .WithName("scanner.sources.getByName")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Create source
        sources.MapPost("/", HandleCreateAsync)
            .WithName("scanner.sources.create")
            .Produces<SourceResponse>(StatusCodes.Status201Created)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status409Conflict)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Update source
        sources.MapPut("/{sourceId:guid}", HandleUpdateAsync)
            .WithName("scanner.sources.update")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Delete source
        sources.MapDelete("/{sourceId:guid}", HandleDeleteAsync)
            .WithName("scanner.sources.delete")
            .Produces(StatusCodes.Status204NoContent)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesAdmin);

        // Test connection (existing source)
        sources.MapPost("/{sourceId:guid}/test", HandleTestConnectionAsync)
            .WithName("scanner.sources.test")
            .Produces<ConnectionTestResult>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Test connection (new configuration)
        sources.MapPost("/test", HandleTestNewConnectionAsync)
            .WithName("scanner.sources.testNew")
            .Produces<ConnectionTestResult>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Pause source
        sources.MapPost("/{sourceId:guid}/pause", HandlePauseAsync)
            .WithName("scanner.sources.pause")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Resume source
        sources.MapPost("/{sourceId:guid}/resume", HandleResumeAsync)
            .WithName("scanner.sources.resume")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Activate source (Draft -> Active)
        sources.MapPost("/{sourceId:guid}/activate", HandleActivateAsync)
            .WithName("scanner.sources.activate")
            .Produces<SourceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // Trigger scan
        sources.MapPost("/{sourceId:guid}/scan", HandleTriggerScanAsync)
            .WithName("scanner.sources.trigger")
            .Produces<TriggerScanResult>(StatusCodes.Status202Accepted)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesWrite);

        // List runs for a source
        sources.MapGet("/{sourceId:guid}/runs", HandleListRunsAsync)
            .WithName("scanner.sources.runs.list")
            .Produces<PagedResponse<SourceRunResponse>>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Get specific run
        sources.MapGet("/{sourceId:guid}/runs/{runId:guid}", HandleGetRunAsync)
            .WithName("scanner.sources.runs.get")
            .Produces<SourceRunResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.SourcesRead);

        // Get source types metadata
        sources.MapGet("/types", HandleGetTypesAsync)
            .WithName("scanner.sources.types")
            .Produces<SourceTypesResponse>(StatusCodes.Status200OK)
            .RequireAuthorization(ScannerPolicies.SourcesRead);
    }

    private static async Task<IResult> HandleListAsync(
        [AsParameters] ListSourcesQueryParams queryParams,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var request = new ListSourcesRequest
        {
            SourceType = queryParams.Type,
            Status = queryParams.Status,
            NameContains = queryParams.Search,
            Cursor = queryParams.Cursor,
            Limit = queryParams.Limit ?? 50
        };

        var result = await sourceService.ListAsync(tenantId, request, ct);
        return Json(result, StatusCodes.Status200OK);
    }

    private static async Task<IResult> HandleGetAsync(
        Guid sourceId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var source = await sourceService.GetAsync(tenantId, sourceId, ct);
        if (source == null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound,
                detail: $"Source {sourceId} not found");
        }

        return Json(source, StatusCodes.Status200OK);
    }

    private static async Task<IResult> HandleGetByNameAsync(
        string name,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var source = await sourceService.GetByNameAsync(tenantId, name, ct);
        if (source == null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound,
                detail: $"Source '{name}' not found");
        }

        return Json(source, StatusCodes.Status200OK);
    }

    private static async Task<IResult> HandleCreateAsync(
        CreateSourceRequest request,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        LinkGenerator links,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var source = await sourceService.CreateAsync(tenantId, request, userId, ct);

            var location = links.GetPathByName(
                httpContext: context,
                endpointName: "scanner.sources.get",
                values: new { sourceId = source.SourceId });

            if (!string.IsNullOrWhiteSpace(location))
            {
                context.Response.Headers.Location = location;
            }

            return Json(source, StatusCodes.Status201Created);
        }
        catch (InvalidOperationException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Conflict,
                "Source already exists",
                StatusCodes.Status409Conflict,
                detail: ex.Message);
        }
        catch (ArgumentException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid request",
                StatusCodes.Status400BadRequest,
                detail: ex.Message);
        }
    }

    private static async Task<IResult> HandleUpdateAsync(
        Guid sourceId,
        UpdateSourceRequest request,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var source = await sourceService.UpdateAsync(tenantId, sourceId, request, userId, ct);
            return Json(source, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
        catch (InvalidOperationException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Conflict,
                "Update conflict",
                StatusCodes.Status409Conflict,
                detail: ex.Message);
        }
        catch (ArgumentException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid request",
                StatusCodes.Status400BadRequest,
                detail: ex.Message);
        }
    }

    private static async Task<IResult> HandleDeleteAsync(
        Guid sourceId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        try
        {
            await sourceService.DeleteAsync(tenantId, sourceId, ct);
            return Results.NoContent();
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleTestConnectionAsync(
        Guid sourceId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        try
        {
            var result = await sourceService.TestConnectionAsync(tenantId, sourceId, ct);
            return Json(result, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleTestNewConnectionAsync(
        TestConnectionRequest request,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var result = await sourceService.TestNewConnectionAsync(tenantId, request, ct);
        return Json(result, StatusCodes.Status200OK);
    }

    private static async Task<IResult> HandlePauseAsync(
        Guid sourceId,
        PauseSourceRequest request,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var source = await sourceService.PauseAsync(tenantId, sourceId, request, userId, ct);
            return Json(source, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleResumeAsync(
        Guid sourceId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var source = await sourceService.ResumeAsync(tenantId, sourceId, userId, ct);
            return Json(source, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleActivateAsync(
        Guid sourceId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var source = await sourceService.ActivateAsync(tenantId, sourceId, userId, ct);
            return Json(source, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleTriggerScanAsync(
        Guid sourceId,
        TriggerScanRequest? request,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        IUserContext userContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var userId = userContext.UserId ?? "system";

        try
        {
            var result = await sourceService.TriggerScanAsync(tenantId, sourceId, request, userId, ct);
            return Json(result, StatusCodes.Status202Accepted);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
        catch (InvalidOperationException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Cannot trigger scan",
                StatusCodes.Status400BadRequest,
                detail: ex.Message);
        }
    }

    private static async Task<IResult> HandleListRunsAsync(
        Guid sourceId,
        [AsParameters] ListRunsQueryParams queryParams,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        var request = new ListSourceRunsRequest
        {
            Status = queryParams.Status,
            Trigger = queryParams.Trigger,
            Cursor = queryParams.Cursor,
            Limit = queryParams.Limit ?? 50
        };

        try
        {
            var result = await sourceService.GetRunsAsync(tenantId, sourceId, request, ct);
            return Json(result, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static async Task<IResult> HandleGetRunAsync(
        Guid sourceId,
        Guid runId,
        ISbomSourceService sourceService,
        ITenantContext tenantContext,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = tenantContext.TenantId;
        if (string.IsNullOrEmpty(tenantId))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Tenant context required",
                StatusCodes.Status400BadRequest);
        }

        try
        {
            var run = await sourceService.GetRunAsync(tenantId, sourceId, runId, ct);
            if (run == null)
            {
                return ProblemResultFactory.Create(
                    context,
                    ProblemTypes.NotFound,
                    "Run not found",
                    StatusCodes.Status404NotFound);
            }

            return Json(run, StatusCodes.Status200OK);
        }
        catch (KeyNotFoundException)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }
    }

    private static Task<IResult> HandleGetTypesAsync(
        ISourceConfigValidator configValidator,
        HttpContext context,
        CancellationToken ct)
    {
        var types = new SourceTypesResponse
        {
            Types = Enum.GetValues<SbomSourceType>()
                .Select(t => new SourceTypeInfo
                {
                    Type = t,
                    Name = t.ToString(),
                    Description = GetSourceTypeDescription(t),
                    ConfigurationSchema = configValidator.GetConfigurationSchema(t)
                })
                .ToList()
        };

        return Task.FromResult(Json(types, StatusCodes.Status200OK));
    }

    private static string GetSourceTypeDescription(SbomSourceType type) => type switch
    {
        SbomSourceType.Zastava => "Container registry webhook - receives push events from Docker Hub, Harbor, ECR, etc.",
        SbomSourceType.Docker => "Docker image scanner - scans images on schedule or on-demand",
        SbomSourceType.Cli => "CLI submission endpoint - receives SBOMs from external tools via API",
        SbomSourceType.Git => "Git repository scanner - scans source code from GitHub, GitLab, Bitbucket, etc.",
        _ => "Unknown source type"
    };

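    // Serializes with the shared SerializerOptions (string enums, nulls omitted)
    // rather than Results.Json, so every response from these endpoints follows
    // the same wire conventions.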
private static IResult Json<T>(T value, int statusCode)
    {
        var payload = JsonSerializer.Serialize(value, SerializerOptions);
        return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode);
    }
}

/// <summary>
/// Query parameters for listing sources.
/// </summary>
public record ListSourcesQueryParams
{
    public SbomSourceType? Type { get; init; }
    public SbomSourceStatus? Status { get; init; }
    public string? Search { get; init; }
    public string? Cursor { get; init; }
    public int? Limit { get; init; }
}

/// <summary>
/// Query parameters for listing runs.
/// </summary>
public record ListRunsQueryParams
{
    public SbomSourceRunStatus? Status { get; init; }
    public SbomSourceRunTrigger? Trigger { get; init; }
    public string? Cursor { get; init; }
    public int? Limit { get; init; }
}

/// <summary>
/// Response containing source type information.
/// </summary>
public record SourceTypesResponse
{
    public required List<SourceTypeInfo> Types { get; init; }
}

/// <summary>
/// Information about a source type.
/// </summary>
public record SourceTypeInfo
{
    public required SbomSourceType Type { get; init; }
    public required string Name { get; init; }
    public required string Description { get; init; }
    public string? ConfigurationSchema { get; init; }
}
@@ -0,0 +1,584 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Sources.Domain;
|
||||
using StellaOps.Scanner.Sources.Handlers;
|
||||
using StellaOps.Scanner.Sources.Persistence;
|
||||
using StellaOps.Scanner.Sources.Services;
|
||||
using StellaOps.Scanner.Sources.Triggers;
|
||||
using StellaOps.Scanner.WebService.Constants;
|
||||
using StellaOps.Scanner.WebService.Infrastructure;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// Endpoints for receiving webhooks from container registries and Git providers.
|
||||
/// </summary>
|
||||
internal static class WebhookEndpoints
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Maps webhook endpoints for receiving push events.
|
||||
/// </summary>
|
||||
public static void MapWebhookEndpoints(this RouteGroupBuilder apiGroup, string webhookSegment = "/webhooks")
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(apiGroup);
|
||||
|
||||
var webhooks = apiGroup.MapGroup(webhookSegment);
|
||||
|
||||
// Generic webhook endpoint (uses sourceId in path)
|
||||
webhooks.MapPost("/{sourceId:guid}", HandleWebhookAsync)
|
||||
.WithName("scanner.webhooks.receive")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status401Unauthorized)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.AllowAnonymous();
|
||||
|
||||
// Docker Hub webhook (uses source name for friendlier URLs)
|
||||
webhooks.MapPost("/docker/{sourceName}", HandleDockerHubWebhookAsync)
|
||||
.WithName("scanner.webhooks.dockerhub")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status401Unauthorized)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.AllowAnonymous();
|
||||
|
||||
// GitHub webhook
|
||||
webhooks.MapPost("/github/{sourceName}", HandleGitHubWebhookAsync)
|
||||
.WithName("scanner.webhooks.github")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status401Unauthorized)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.AllowAnonymous();
|
||||
|
||||
// GitLab webhook
|
||||
webhooks.MapPost("/gitlab/{sourceName}", HandleGitLabWebhookAsync)
|
||||
.WithName("scanner.webhooks.gitlab")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status401Unauthorized)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.AllowAnonymous();
|
||||
|
||||
// Harbor webhook
|
||||
webhooks.MapPost("/harbor/{sourceName}", HandleHarborWebhookAsync)
|
||||
.WithName("scanner.webhooks.harbor")
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status401Unauthorized)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.AllowAnonymous();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Handle generic webhook by source ID.
|
||||
/// </summary>
|
||||
private static async Task<IResult> HandleWebhookAsync(
|
||||
Guid sourceId,
|
||||
[FromHeader(Name = "X-Hub-Signature-256")] string? signatureSha256,
|
||||
[FromHeader(Name = "X-Hub-Signature")] string? signatureSha1,
|
||||
[FromHeader(Name = "X-Gitlab-Token")] string? gitlabToken,
|
||||
[FromHeader(Name = "Authorization")] string? authorization,
|
||||
ISbomSourceRepository sourceRepository,
|
||||
IEnumerable<ISourceTypeHandler> handlers,
|
||||
ISourceTriggerDispatcher dispatcher,
|
||||
ICredentialResolver credentialResolver,
|
||||
ILogger<WebhookEndpoints> logger,
|
||||
HttpContext context,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// Read the raw payload
|
||||
using var reader = new StreamReader(context.Request.Body);
|
||||
var payloadString = await reader.ReadToEndAsync(ct);
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payloadString);
|
||||
|
||||
// Get the source
|
||||
var source = await sourceRepository.GetByIdAsync(null!, sourceId, ct);
|
||||
if (source == null)
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.NotFound,
|
||||
"Source not found",
|
||||
StatusCodes.Status404NotFound);
|
||||
}
|
||||
|
||||
// Get the handler
|
||||
var handler = handlers.FirstOrDefault(h => h.SourceType == source.SourceType);
|
||||
if (handler == null || handler is not IWebhookCapableHandler webhookHandler)
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Source does not support webhooks",
|
||||
StatusCodes.Status400BadRequest);
|
||||
}
|
||||
|
||||
// Determine signature to use
|
||||
var signature = signatureSha256 ?? signatureSha1 ?? gitlabToken ?? ExtractBearerToken(authorization);
|
||||
|
||||
// Verify signature if source has a webhook secret reference
|
||||
if (!string.IsNullOrEmpty(source.WebhookSecretRef))
|
||||
{
|
||||
if (string.IsNullOrEmpty(signature))
|
||||
{
|
||||
logger.LogWarning("Webhook received without signature for source {SourceId}", sourceId);
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Unauthorized,
|
||||
"Missing webhook signature",
|
||||
StatusCodes.Status401Unauthorized);
|
||||
}
|
||||
|
||||
// Resolve the webhook secret from the credential store
|
||||
var secretCredential = await credentialResolver.ResolveAsync(source.WebhookSecretRef, ct);
|
||||
var webhookSecret = secretCredential?.Token ?? secretCredential?.Password;
|
||||
|
||||
if (string.IsNullOrEmpty(webhookSecret))
|
||||
{
|
||||
logger.LogWarning("Failed to resolve webhook secret for source {SourceId}", sourceId);
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.InternalError,
|
||||
"Failed to resolve webhook secret",
|
||||
StatusCodes.Status500InternalServerError);
|
||||
}
|
||||
|
||||
if (!webhookHandler.VerifyWebhookSignature(payloadBytes, signature, webhookSecret))
|
||||
{
|
||||
logger.LogWarning("Invalid webhook signature for source {SourceId}", sourceId);
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Unauthorized,
|
||||
"Invalid webhook signature",
|
||||
StatusCodes.Status401Unauthorized);
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the payload
|
||||
JsonDocument payload;
|
||||
try
|
||||
{
|
||||
payload = JsonDocument.Parse(payloadString, new JsonDocumentOptions
|
||||
{
|
||||
AllowTrailingCommas = true
|
||||
});
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
logger.LogWarning(ex, "Invalid JSON payload for source {SourceId}", sourceId);
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid JSON payload",
|
||||
StatusCodes.Status400BadRequest);
|
||||
}
|
||||
|
||||
// Create trigger context
|
||||
var triggerContext = new TriggerContext
|
||||
{
|
||||
Trigger = SbomSourceRunTrigger.Webhook,
|
||||
TriggerDetails = $"Webhook from {context.Request.Headers["User-Agent"]}",
|
||||
CorrelationId = context.TraceIdentifier,
|
||||
WebhookPayload = payload
|
||||
};
|
||||
|
||||
// Dispatch the trigger
|
||||
try
|
||||
{
|
||||
var result = await dispatcher.DispatchAsync(sourceId, triggerContext, ct);
|
||||
|
||||
if (!result.Success)
|
||||
{
|
||||
logger.LogWarning(
|
||||
"Webhook dispatch failed for source {SourceId}: {Error}",
|
||||
sourceId, result.Error);
|
||||
|
||||
// Return 200 even on dispatch failure to prevent retries
|
||||
// The error is logged and tracked in the run record
|
||||
return Results.Ok(new WebhookResponse
|
||||
{
|
||||
Accepted = false,
|
||||
Message = result.Error,
|
||||
RunId = result.Run?.RunId
|
||||
});
|
||||
}
|
||||
|
||||
logger.LogInformation(
|
||||
"Webhook processed for source {SourceId}, run {RunId}, {JobCount} jobs queued",
|
||||
sourceId, result.Run?.RunId, result.JobsQueued);
|
||||
|
||||
return Results.Accepted(value: new WebhookResponse
|
||||
{
|
||||
Accepted = true,
|
||||
Message = $"Queued {result.JobsQueued} scan jobs",
|
||||
RunId = result.Run?.RunId,
|
||||
JobsQueued = result.JobsQueued
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
logger.LogError(ex, "Webhook processing failed for source {SourceId}", sourceId);
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.InternalError,
|
||||
"Webhook processing failed",
|
||||
StatusCodes.Status500InternalServerError,
|
||||
detail: ex.Message);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Handle Docker Hub webhook by source name.
|
||||
/// </summary>
|
||||
private static async Task<IResult> HandleDockerHubWebhookAsync(
|
||||
string sourceName,
|
||||
ISbomSourceRepository sourceRepository,
|
||||
IEnumerable<ISourceTypeHandler> handlers,
|
||||
ISourceTriggerDispatcher dispatcher,
|
||||
ICredentialResolver credentialResolver,
|
||||
ILogger<WebhookEndpoints> logger,
|
||||
HttpContext context,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// Docker Hub uses callback_url for validation
|
||||
// and sends signature in body.callback_url when configured
|
||||
|
||||
var source = await FindSourceByNameAsync(sourceRepository, sourceName, SbomSourceType.Zastava, ct);
|
||||
if (source == null)
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.NotFound,
|
||||
"Source not found",
|
||||
StatusCodes.Status404NotFound);
|
||||
}
|
||||
|
||||
return await ProcessWebhookAsync(
|
||||
source,
|
||||
handlers,
|
||||
            dispatcher,
            credentialResolver,
            logger,
            context,
            signatureHeader: "X-Hub-Signature",
            ct);
    }

    /// <summary>
    /// Handle GitHub webhook by source name.
    /// </summary>
    private static async Task<IResult> HandleGitHubWebhookAsync(
        string sourceName,
        [FromHeader(Name = "X-GitHub-Event")] string? eventType,
        ISbomSourceRepository sourceRepository,
        IEnumerable<ISourceTypeHandler> handlers,
        ISourceTriggerDispatcher dispatcher,
        ICredentialResolver credentialResolver,
        ILogger<WebhookEndpoints> logger,
        HttpContext context,
        CancellationToken ct)
    {
        // GitHub can send ping events for webhook validation
        if (eventType == "ping")
        {
            return Results.Ok(new { message = "pong" });
        }

        // Only process push, pull_request, and create events
        if (eventType != "push" && eventType != "pull_request" && eventType != "create")
        {
            return Results.Ok(new { message = $"Event type '{eventType}' ignored" });
        }

        var source = await FindSourceByNameAsync(sourceRepository, sourceName, SbomSourceType.Git, ct);
        if (source == null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }

        return await ProcessWebhookAsync(
            source,
            handlers,
            dispatcher,
            credentialResolver,
            logger,
            context,
            signatureHeader: "X-Hub-Signature-256",
            ct);
    }

    /// <summary>
    /// Handle GitLab webhook by source name.
    /// </summary>
    private static async Task<IResult> HandleGitLabWebhookAsync(
        string sourceName,
        [FromHeader(Name = "X-Gitlab-Event")] string? eventType,
        ISbomSourceRepository sourceRepository,
        IEnumerable<ISourceTypeHandler> handlers,
        ISourceTriggerDispatcher dispatcher,
        ICredentialResolver credentialResolver,
        ILogger<WebhookEndpoints> logger,
        HttpContext context,
        CancellationToken ct)
    {
        // Only process push, merge request, and tag push events
        if (eventType != "Push Hook" && eventType != "Merge Request Hook" && eventType != "Tag Push Hook")
        {
            return Results.Ok(new { message = $"Event type '{eventType}' ignored" });
        }

        var source = await FindSourceByNameAsync(sourceRepository, sourceName, SbomSourceType.Git, ct);
        if (source == null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }

        return await ProcessWebhookAsync(
            source,
            handlers,
            dispatcher,
            credentialResolver,
            logger,
            context,
            signatureHeader: "X-Gitlab-Token",
            ct);
    }

    /// <summary>
    /// Handle Harbor webhook by source name.
    /// </summary>
    private static async Task<IResult> HandleHarborWebhookAsync(
        string sourceName,
        ISbomSourceRepository sourceRepository,
        IEnumerable<ISourceTypeHandler> handlers,
        ISourceTriggerDispatcher dispatcher,
        ICredentialResolver credentialResolver,
        ILogger<WebhookEndpoints> logger,
        HttpContext context,
        CancellationToken ct)
    {
        var source = await FindSourceByNameAsync(sourceRepository, sourceName, SbomSourceType.Zastava, ct);
        if (source == null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Source not found",
                StatusCodes.Status404NotFound);
        }

        return await ProcessWebhookAsync(
            source,
            handlers,
            dispatcher,
            credentialResolver,
            logger,
            context,
            signatureHeader: "Authorization",
            ct);
    }

    private static async Task<SbomSource?> FindSourceByNameAsync(
        ISbomSourceRepository repository,
        string name,
        SbomSourceType expectedType,
        CancellationToken ct)
    {
        // Search across all tenants for the source by name.
        // Note: in production this should be scoped to a specific tenant
        // extracted from the webhook URL or a custom header.
        var sources = await repository.SearchByNameAsync(name, ct);
        return sources.FirstOrDefault(s => s.SourceType == expectedType);
    }
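
    // A minimal sketch of the tenant-scoped lookup the note above calls for.
    // The X-StellaOps-Tenant header name and the TenantId property are
    // assumptions for illustration only; neither is defined in this change set.
    private static async Task<SbomSource?> FindTenantScopedSourceAsync(
        ISbomSourceRepository repository,
        HttpContext context,
        string name,
        SbomSourceType expectedType,
        CancellationToken ct)
    {
        var tenantId = context.Request.Headers["X-StellaOps-Tenant"].FirstOrDefault();
        if (string.IsNullOrEmpty(tenantId))
        {
            return null; // refuse to guess a tenant for an unscoped webhook
        }

        var sources = await repository.SearchByNameAsync(name, ct);
        return sources.FirstOrDefault(s =>
            s.SourceType == expectedType &&
            string.Equals(s.TenantId, tenantId, StringComparison.Ordinal));
    }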

    private static async Task<IResult> ProcessWebhookAsync(
        SbomSource source,
        IEnumerable<ISourceTypeHandler> handlers,
        ISourceTriggerDispatcher dispatcher,
        ICredentialResolver credentialResolver,
        ILogger<WebhookEndpoints> logger,
        HttpContext context,
        string signatureHeader,
        CancellationToken ct)
    {
        // Read the raw payload
        using var reader = new StreamReader(context.Request.Body);
        var payloadString = await reader.ReadToEndAsync(ct);
        var payloadBytes = Encoding.UTF8.GetBytes(payloadString);

        // Get the handler
        var handler = handlers.FirstOrDefault(h => h.SourceType == source.SourceType);
        if (handler == null || handler is not IWebhookCapableHandler webhookHandler)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Source does not support webhooks",
                StatusCodes.Status400BadRequest);
        }

        // Get signature from header
        string? signature = signatureHeader switch
        {
            "X-Hub-Signature-256" => context.Request.Headers["X-Hub-Signature-256"].FirstOrDefault(),
            "X-Hub-Signature" => context.Request.Headers["X-Hub-Signature"].FirstOrDefault(),
            "X-Gitlab-Token" => context.Request.Headers["X-Gitlab-Token"].FirstOrDefault(),
            "Authorization" => ExtractBearerToken(context.Request.Headers.Authorization.FirstOrDefault()),
            _ => null
        };

        // Verify signature if source has a webhook secret reference
        if (!string.IsNullOrEmpty(source.WebhookSecretRef))
        {
            if (string.IsNullOrEmpty(signature))
            {
                logger.LogWarning("Webhook received without signature for source {SourceId}", source.SourceId);
                return ProblemResultFactory.Create(
                    context,
                    ProblemTypes.Unauthorized,
                    "Missing webhook signature",
                    StatusCodes.Status401Unauthorized);
            }

            // Resolve the webhook secret from the credential store
            var secretCredential = await credentialResolver.ResolveAsync(source.WebhookSecretRef, ct);
            var webhookSecret = secretCredential?.Token ?? secretCredential?.Password;

            if (string.IsNullOrEmpty(webhookSecret))
            {
                logger.LogWarning("Failed to resolve webhook secret for source {SourceId}", source.SourceId);
                return ProblemResultFactory.Create(
                    context,
                    ProblemTypes.InternalError,
                    "Failed to resolve webhook secret",
                    StatusCodes.Status500InternalServerError);
            }

            if (!webhookHandler.VerifyWebhookSignature(payloadBytes, signature, webhookSecret))
            {
                logger.LogWarning("Invalid webhook signature for source {SourceId}", source.SourceId);
                return ProblemResultFactory.Create(
                    context,
                    ProblemTypes.Unauthorized,
                    "Invalid webhook signature",
                    StatusCodes.Status401Unauthorized);
            }
        }

        // Parse the payload
        JsonDocument payload;
        try
        {
            payload = JsonDocument.Parse(payloadString, new JsonDocumentOptions
            {
                AllowTrailingCommas = true
            });
        }
        catch (JsonException ex)
        {
            logger.LogWarning(ex, "Invalid JSON payload for source {SourceId}", source.SourceId);
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid JSON payload",
                StatusCodes.Status400BadRequest);
        }

        // Create trigger context
        var triggerContext = new TriggerContext
        {
            Trigger = SbomSourceRunTrigger.Webhook,
            TriggerDetails = $"Webhook from {context.Request.Headers["User-Agent"]}",
            CorrelationId = context.TraceIdentifier,
            WebhookPayload = payload
        };

        // Dispatch the trigger
        try
        {
            var result = await dispatcher.DispatchAsync(source.SourceId, triggerContext, ct);

            if (!result.Success)
            {
                logger.LogWarning(
                    "Webhook dispatch failed for source {SourceId}: {Error}",
                    source.SourceId, result.Error);

                return Results.Ok(new WebhookResponse
                {
                    Accepted = false,
                    Message = result.Error,
                    RunId = result.Run?.RunId
                });
            }

            logger.LogInformation(
                "Webhook processed for source {SourceId}, run {RunId}, {JobCount} jobs queued",
                source.SourceId, result.Run?.RunId, result.JobsQueued);

            return Results.Accepted(value: new WebhookResponse
            {
                Accepted = true,
                Message = $"Queued {result.JobsQueued} scan jobs",
                RunId = result.Run?.RunId,
                JobsQueued = result.JobsQueued
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Webhook processing failed for source {SourceId}", source.SourceId);
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.InternalError,
                "Webhook processing failed",
                StatusCodes.Status500InternalServerError,
                detail: ex.Message);
        }
    }

    private static string? ExtractBearerToken(string? authHeader)
    {
        if (string.IsNullOrEmpty(authHeader))
            return null;

        if (authHeader.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
            return authHeader[7..];

        return authHeader;
    }
}
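
// A sketch of the check webhookHandler.VerifyWebhookSignature performs for
// GitHub-style headers, assuming GitHub's documented HMAC-SHA256 scheme where
// the header carries "sha256=<hex digest>". Illustrative only; the real
// implementations live in the IWebhookCapableHandler types.
internal static class WebhookSignatureExample
{
    public static bool VerifyGitHubSignature(byte[] payload, string signature, string secret)
    {
        var hex = signature.StartsWith("sha256=", StringComparison.OrdinalIgnoreCase)
            ? signature[7..]
            : signature;

        using var hmac = new System.Security.Cryptography.HMACSHA256(Encoding.UTF8.GetBytes(secret));
        var computed = Convert.ToHexString(hmac.ComputeHash(payload)).ToLowerInvariant();

        // Fixed-time comparison avoids leaking the digest via timing
        return System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(
            Encoding.UTF8.GetBytes(computed),
            Encoding.UTF8.GetBytes(hex.ToLowerInvariant()));
    }
}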

/// <summary>
/// Response for webhook processing.
/// </summary>
public record WebhookResponse
{
    public bool Accepted { get; init; }
    public string? Message { get; init; }
    public Guid? RunId { get; init; }
    public int JobsQueued { get; init; }
}
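
// Example 202 Accepted body serialized from WebhookResponse (values are
// illustrative; the camelCase names assume ASP.NET Core's default JSON options):
// { "accepted": true, "message": "Queued 3 scan jobs",
//   "runId": "5b1f5cbe-0000-0000-0000-000000000000", "jobsQueued": 3 }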

@@ -19,4 +19,9 @@ internal static class ScannerPolicies

    // Admin policies
    public const string Admin = "scanner.admin";

    // Sources policies
    public const string SourcesRead = "scanner.sources.read";
    public const string SourcesWrite = "scanner.sources.write";
    public const string SourcesAdmin = "scanner.sources.admin";
}

@@ -46,6 +46,7 @@
    <ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
    <ProjectReference Include="../../Router/__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Orchestration/StellaOps.Scanner.Orchestration.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Sources/StellaOps.Scanner.Sources.csproj" />
    <ProjectReference Include="../../Router/__Libraries/StellaOps.Router.AspNet/StellaOps.Router.AspNet.csproj" />
  </ItemGroup>

@@ -59,6 +59,18 @@ public enum RegistryType
    /// <summary>JFrog Artifactory.</summary>
    Artifactory,

    /// <summary>GitLab Container Registry.</summary>
    GitLab,

    /// <summary>Sonatype Nexus Registry.</summary>
    Nexus,

    /// <summary>JFrog Container Registry (standalone).</summary>
    JFrog,

    /// <summary>Custom/self-hosted OCI registry.</summary>
    Custom,

    /// <summary>Generic registry with configurable payload mapping.</summary>
    Generic
}

@@ -83,6 +95,25 @@ public sealed record ZastavaFilters
    /// <summary>Tag patterns to exclude (glob patterns).</summary>
    [JsonPropertyName("excludeTags")]
    public string[]? ExcludeTags { get; init; }

    // Computed properties for handler compatibility
    [JsonIgnore]
    public IReadOnlyList<string> RepositoryPatterns => Repositories;

    [JsonIgnore]
    public IReadOnlyList<string> TagPatterns => Tags;

    [JsonIgnore]
    public IReadOnlyList<string>? ExcludePatterns
    {
        get
        {
            var combined = new List<string>();
            if (ExcludeRepositories != null) combined.AddRange(ExcludeRepositories);
            if (ExcludeTags != null) combined.AddRange(ExcludeTags);
            return combined.Count > 0 ? combined : null;
        }
    }
}

/// <summary>
@@ -0,0 +1,119 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;

namespace StellaOps.Scanner.Sources.ConnectionTesters;

/// <summary>
/// Connection tester for CLI sources.
/// CLI sources are passive endpoints - they receive SBOMs from external tools.
/// This tester validates the configuration rather than testing a connection.
/// </summary>
public sealed class CliConnectionTester : ISourceTypeConnectionTester
{
    private readonly ICredentialResolver _credentialResolver;
    private readonly ILogger<CliConnectionTester> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Cli;

    public CliConnectionTester(
        ICredentialResolver credentialResolver,
        ILogger<CliConnectionTester> logger)
    {
        _credentialResolver = credentialResolver;
        _logger = logger;
    }

    public async Task<ConnectionTestResult> TestAsync(
        SbomSource source,
        JsonDocument? overrideCredentials,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<CliSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration format",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        var details = new Dictionary<string, object>
        {
            ["sourceType"] = "CLI",
            ["endpointType"] = "passive"
        };

        // CLI sources are passive - validate configuration instead
        var validationIssues = new List<string>();

        // Check accepted formats
        if (config.Validation.AllowedFormats is { Length: > 0 })
        {
            details["acceptedFormats"] = config.Validation.AllowedFormats.Select(f => f.ToString()).ToList();
        }
        else
        {
            details["acceptedFormats"] = "all";
        }

        // Check validation rules
        if (config.Validation.RequireSignedSbom)
        {
            details["requiresSignature"] = true;
        }

        if (config.Validation.MaxSbomSizeBytes > 0)
        {
            details["maxFileSizeBytes"] = config.Validation.MaxSbomSizeBytes;
        }

        // Check if the auth reference is valid (if provided)
        if (!string.IsNullOrEmpty(source.AuthRef))
        {
            var authValid = await _credentialResolver.ValidateRefAsync(source.AuthRef, ct);
            if (!authValid)
            {
                validationIssues.Add("AuthRef credential not found or inaccessible");
            }
            else
            {
                details["authConfigured"] = true;
            }
        }

        // Point callers at the submission endpoint
        details["note"] = "CLI sources receive SBOMs via API endpoint";
        details["submissionEndpoint"] = $"/api/v1/sources/{source.SourceId}/sbom";

        if (validationIssues.Count > 0)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Configuration issues: {string.Join("; ", validationIssues)}",
                TestedAt = DateTimeOffset.UtcNow,
                Details = details
            };
        }

        return new ConnectionTestResult
        {
            Success = true,
            Message = "CLI source configuration is valid - ready to receive SBOMs",
            TestedAt = DateTimeOffset.UtcNow,
            Details = details
        };
    }
}

@@ -0,0 +1,303 @@
using System.Net;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;

namespace StellaOps.Scanner.Sources.ConnectionTesters;

/// <summary>
/// Tests connection to Docker registries for scheduled image scanning.
/// </summary>
public sealed class DockerConnectionTester : ISourceTypeConnectionTester
{
    private readonly IHttpClientFactory _httpClientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ILogger<DockerConnectionTester> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Docker;

    public DockerConnectionTester(
        IHttpClientFactory httpClientFactory,
        ICredentialResolver credentialResolver,
        ILogger<DockerConnectionTester> logger)
    {
        _httpClientFactory = httpClientFactory;
        _credentialResolver = credentialResolver;
        _logger = logger;
    }

    public async Task<ConnectionTestResult> TestAsync(
        SbomSource source,
        JsonDocument? overrideCredentials,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<DockerSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration format",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        var client = _httpClientFactory.CreateClient("SourceConnectionTest");
        client.Timeout = TimeSpan.FromSeconds(30);

        // Get credentials
        string? authHeader = null;
        if (overrideCredentials != null)
        {
            authHeader = ExtractAuthFromTestCredentials(overrideCredentials);
        }
        else if (!string.IsNullOrEmpty(source.AuthRef))
        {
            var creds = await _credentialResolver.ResolveAsync(source.AuthRef, ct);
            authHeader = BuildAuthHeader(creds);
        }

        if (authHeader != null)
        {
            client.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", authHeader);
        }

        try
        {
            // Determine registry URL
            var registryUrl = GetRegistryUrl(config);
            var testUrl = $"{registryUrl}/v2/";

            var response = await client.GetAsync(testUrl, ct);

            var details = new Dictionary<string, object>
            {
                ["registryUrl"] = registryUrl,
                ["statusCode"] = (int)response.StatusCode
            };

            // Test image access if we have specific images configured
            if (response.IsSuccessStatusCode && config.Images.Length > 0)
            {
                var firstImage = config.Images[0];
                var imageTestResult = await TestImageAccess(
                    client, registryUrl, firstImage, ct);

                details["imageTest"] = imageTestResult;

                if (!imageTestResult.Success)
                {
                    return new ConnectionTestResult
                    {
                        Success = false,
                        Message = $"Registry accessible but image test failed: {imageTestResult.Message}",
                        TestedAt = DateTimeOffset.UtcNow,
                        Details = details
                    };
                }
            }

            if (response.IsSuccessStatusCode)
            {
                return new ConnectionTestResult
                {
                    Success = true,
                    Message = "Successfully connected to Docker registry",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                };
            }

            details["responseBody"] = await TruncateResponseBody(response, ct);

            return response.StatusCode switch
            {
                HttpStatusCode.Unauthorized => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Authentication required - configure credentials",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                HttpStatusCode.Forbidden => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Access denied - check permissions",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                _ => new ConnectionTestResult
                {
                    Success = false,
                    Message = $"Registry returned {response.StatusCode}",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                }
            };
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "HTTP error testing Docker connection");
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Connection timed out",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
    }

    private static string GetRegistryUrl(DockerSourceConfig config)
    {
        if (!string.IsNullOrEmpty(config.RegistryUrl))
        {
            return config.RegistryUrl.TrimEnd('/');
        }

        // Default to Docker Hub
        return "https://registry-1.docker.io";
    }

    private async Task<ImageTestResult> TestImageAccess(
        HttpClient client,
        string registryUrl,
        ImageSpec image,
        CancellationToken ct)
    {
        var repository = GetRepositoryFromReference(image.Reference);

        try
        {
            // Try to list the repository's tags as an access probe
            var tagsUrl = $"{registryUrl}/v2/{repository}/tags/list";
            var response = await client.GetAsync(tagsUrl, ct);

            if (response.IsSuccessStatusCode)
            {
                return new ImageTestResult
                {
                    Success = true,
                    Message = "Image repository accessible",
                    Repository = repository
                };
            }

            return new ImageTestResult
            {
                Success = false,
                Message = $"Cannot access repository: {response.StatusCode}",
                Repository = repository
            };
        }
        catch (Exception ex)
        {
            return new ImageTestResult
            {
                Success = false,
                Message = $"Error accessing repository: {ex.Message}",
                Repository = repository
            };
        }
    }

    private static string GetRepositoryFromReference(string reference)
    {
        // Reference format: [registry/]repo[/subpath]:tag or [registry/]repo[/subpath]@sha256:digest
        // Strip the tag or digest
        var atIdx = reference.IndexOf('@');
        var colonIdx = reference.LastIndexOf(':');

        string repoWithRegistry;
        if (atIdx > 0)
        {
            repoWithRegistry = reference[..atIdx];
        }
        else if (colonIdx > 0 && !reference[..colonIdx].Contains('/'))
        {
            // Simple format like "nginx:latest" - no registry prefix
            repoWithRegistry = reference[..colonIdx];
        }
        else if (colonIdx > 0)
        {
            repoWithRegistry = reference[..colonIdx];
        }
        else
        {
            repoWithRegistry = reference;
        }

        // For Docker Hub, prepend "library/" for official images
        if (!repoWithRegistry.Contains('/'))
        {
            return $"library/{repoWithRegistry}";
        }

        return repoWithRegistry;
    }
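
    // Illustrative input -> output pairs for GetRepositoryFromReference,
    // traced from the logic above (not part of the change set):
    //   "nginx:latest"                   -> "library/nginx"
    //   "myorg/app:1.2"                  -> "myorg/app"
    //   "ghcr.io/org/app:1.2"            -> "ghcr.io/org/app"
    //   "ghcr.io/org/app@sha256:abc123"  -> "ghcr.io/org/app"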

    private static string? ExtractAuthFromTestCredentials(JsonDocument credentials)
    {
        var root = credentials.RootElement;

        if (root.TryGetProperty("token", out var token))
        {
            return $"Bearer {token.GetString()}";
        }

        if (root.TryGetProperty("username", out var username) &&
            root.TryGetProperty("password", out var password))
        {
            var encoded = Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes(
                    $"{username.GetString()}:{password.GetString()}"));
            return $"Basic {encoded}";
        }

        return null;
    }

    private static string? BuildAuthHeader(ResolvedCredential? credential)
    {
        if (credential == null) return null;

        return credential.Type switch
        {
            CredentialType.BearerToken => $"Bearer {credential.Token}",
            CredentialType.BasicAuth => $"Basic {Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes($"{credential.Username}:{credential.Password}"))}",
            _ => null
        };
    }

    private static async Task<string> TruncateResponseBody(HttpResponseMessage response, CancellationToken ct)
    {
        var body = await response.Content.ReadAsStringAsync(ct);
        return body.Length > 500 ? body[..500] + "..." : body;
    }

    private sealed record ImageTestResult
    {
        public bool Success { get; init; }
        public string Message { get; init; } = "";
        public string Repository { get; init; } = "";
    }
}

@@ -0,0 +1,389 @@
using System.Net;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;

namespace StellaOps.Scanner.Sources.ConnectionTesters;

/// <summary>
/// Tests connection to Git repositories for source scanning.
/// </summary>
public sealed class GitConnectionTester : ISourceTypeConnectionTester
{
    private readonly IHttpClientFactory _httpClientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ILogger<GitConnectionTester> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Git;

    public GitConnectionTester(
        IHttpClientFactory httpClientFactory,
        ICredentialResolver credentialResolver,
        ILogger<GitConnectionTester> logger)
    {
        _httpClientFactory = httpClientFactory;
        _credentialResolver = credentialResolver;
        _logger = logger;
    }

    public async Task<ConnectionTestResult> TestAsync(
        SbomSource source,
        JsonDocument? overrideCredentials,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<GitSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration format",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        // Determine the test approach based on URL type
        var repoUrl = config.RepositoryUrl;

        if (IsSshUrl(repoUrl))
        {
            // SSH URLs require a different testing approach
            return await TestSshConnection(source, config, overrideCredentials, ct);
        }

        // HTTPS URLs can be tested via API
        return await TestHttpsConnection(source, config, overrideCredentials, ct);
    }

    private async Task<ConnectionTestResult> TestHttpsConnection(
        SbomSource source,
        GitSourceConfig config,
        JsonDocument? overrideCredentials,
        CancellationToken ct)
    {
        var client = _httpClientFactory.CreateClient("SourceConnectionTest");
        client.Timeout = TimeSpan.FromSeconds(30);

        // Build auth header
        string? authHeader = null;
        if (overrideCredentials != null)
        {
            authHeader = ExtractAuthFromTestCredentials(overrideCredentials);
        }
        else if (!string.IsNullOrEmpty(source.AuthRef))
        {
            var creds = await _credentialResolver.ResolveAsync(source.AuthRef, ct);
            authHeader = BuildAuthHeader(creds);
        }

        if (authHeader != null)
        {
            client.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", authHeader);
        }

        try
        {
            var testUrl = BuildApiTestUrl(config);
            if (testUrl == null)
            {
                // Fall back to git info/refs
                testUrl = GetGitInfoRefsUrl(config.RepositoryUrl);
            }

            _logger.LogDebug("Testing Git connection to {Url}", testUrl);

            var response = await client.GetAsync(testUrl, ct);

            var details = new Dictionary<string, object>
            {
                ["repositoryUrl"] = config.RepositoryUrl,
                ["provider"] = config.Provider.ToString(),
                ["statusCode"] = (int)response.StatusCode
            };

            if (response.IsSuccessStatusCode)
            {
                // Try to extract additional info
                var repoInfo = await ExtractRepoInfo(response, config.Provider, ct);
                if (repoInfo != null)
                {
                    details["defaultBranch"] = repoInfo.DefaultBranch;
                    details["visibility"] = repoInfo.IsPrivate ? "private" : "public";
                }

                return new ConnectionTestResult
                {
                    Success = true,
                    Message = "Successfully connected to Git repository",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                };
            }

            details["responseBody"] = await TruncateResponseBody(response, ct);

            return response.StatusCode switch
            {
                HttpStatusCode.Unauthorized => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Authentication required - configure credentials",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                HttpStatusCode.Forbidden => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Access denied - check token permissions",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                HttpStatusCode.NotFound => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Repository not found - check URL and access",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                _ => new ConnectionTestResult
                {
                    Success = false,
                    Message = $"Server returned {response.StatusCode}",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                }
            };
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "HTTP error testing Git connection");
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["repositoryUrl"] = config.RepositoryUrl
                }
            };
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Connection timed out",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
    }

    private Task<ConnectionTestResult> TestSshConnection(
        SbomSource source,
        GitSourceConfig config,
        JsonDocument? overrideCredentials,
        CancellationToken ct)
    {
        // SSH connection testing requires an actual SSH client.
        // For now, report that SSH will be validated on the first scan.
        return Task.FromResult(new ConnectionTestResult
        {
            Success = true,
            Message = "SSH configuration accepted - connection will be validated on first scan",
            TestedAt = DateTimeOffset.UtcNow,
            Details = new Dictionary<string, object>
            {
                ["repositoryUrl"] = config.RepositoryUrl,
                ["authMethod"] = config.AuthMethod.ToString(),
                ["note"] = "Full SSH validation requires runtime execution"
            }
        });
    }
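
    // If a real SSH reachability probe is wanted later, a minimal sketch using
    // the third-party SSH.NET package (an assumption - this change set does not
    // reference it; host, user, and key would come from config and the resolver):
    //
    // using Renci.SshNet;
    //
    // private static bool CanConnectOverSsh(string host, string user, Stream privateKey)
    // {
    //     using var client = new SshClient(host, 22, user, new PrivateKeyFile(privateKey));
    //     try
    //     {
    //         client.Connect();
    //         return client.IsConnected;
    //     }
    //     catch (Exception)
    //     {
    //         return false;
    //     }
    //     finally
    //     {
    //         if (client.IsConnected) client.Disconnect();
    //     }
    // }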

    private static bool IsSshUrl(string url)
    {
        return url.StartsWith("git@", StringComparison.OrdinalIgnoreCase) ||
               url.StartsWith("ssh://", StringComparison.OrdinalIgnoreCase);
    }

    private static string? BuildApiTestUrl(GitSourceConfig config)
    {
        // Parse owner/repo from URL
        var (owner, repo) = ParseRepoPath(config.RepositoryUrl);
        if (owner == null || repo == null)
            return null;

        return config.Provider switch
        {
            GitProvider.GitHub => $"https://api.github.com/repos/{owner}/{repo}",
            GitProvider.GitLab => BuildGitLabApiUrl(config.RepositoryUrl, owner, repo),
            GitProvider.Bitbucket => $"https://api.bitbucket.org/2.0/repositories/{owner}/{repo}",
            GitProvider.AzureDevOps => null, // Azure DevOps requires a different approach
            GitProvider.Gitea => BuildGiteaApiUrl(config.RepositoryUrl, owner, repo),
            _ => null
        };
    }

    private static string GetGitInfoRefsUrl(string repoUrl)
    {
        var baseUrl = repoUrl.TrimEnd('/');
        if (!baseUrl.EndsWith(".git"))
        {
            baseUrl += ".git";
        }
        return $"{baseUrl}/info/refs?service=git-upload-pack";
    }

    private static string BuildGitLabApiUrl(string repoUrl, string owner, string repo)
    {
        // Extract GitLab host from URL
        var host = ExtractHost(repoUrl);
        var encodedPath = Uri.EscapeDataString($"{owner}/{repo}");
        return $"https://{host}/api/v4/projects/{encodedPath}";
    }

    private static string BuildGiteaApiUrl(string repoUrl, string owner, string repo)
    {
        var host = ExtractHost(repoUrl);
        return $"https://{host}/api/v1/repos/{owner}/{repo}";
    }

    private static string ExtractHost(string repoUrl)
    {
        // Normalize the SSH form (git@host:path) to an HTTPS URL before parsing;
        // a blanket Replace(":", "/") would also mangle the scheme separator.
        var normalized = repoUrl.StartsWith("git@", StringComparison.OrdinalIgnoreCase)
            ? "https://" + repoUrl[4..].Replace(':', '/')
            : repoUrl;
        return new Uri(normalized).Host;
    }

    private static (string? Owner, string? Repo) ParseRepoPath(string url)
    {
        try
        {
            // Handle SSH URLs: git@github.com:owner/repo.git
            if (url.StartsWith("git@"))
            {
                var colonIdx = url.IndexOf(':');
                if (colonIdx > 0)
                {
                    var path = url[(colonIdx + 1)..].TrimEnd('/');
                    if (path.EndsWith(".git"))
                        path = path[..^4];
                    var parts = path.Split('/');
                    if (parts.Length >= 2)
                        return (parts[0], parts[1]);
                }
            }

            // Handle HTTPS URLs
            var uri = new Uri(url);
            var segments = uri.AbsolutePath.Trim('/').Split('/');
            if (segments.Length >= 2)
            {
                var repo = segments[1];
                if (repo.EndsWith(".git"))
                    repo = repo[..^4];
                return (segments[0], repo);
            }
        }
        catch
        {
            // URL parsing failed
        }

        return (null, null);
    }

    private static string? ExtractAuthFromTestCredentials(JsonDocument credentials)
    {
        var root = credentials.RootElement;

        if (root.TryGetProperty("token", out var token))
        {
            var tokenStr = token.GetString();
            // GitHub tokens are prefixed with ghp_, gho_, etc.
            // GitLab tokens are prefixed with glpat-.
            // For most providers, Bearer auth works.
            return $"Bearer {tokenStr}";
        }

        if (root.TryGetProperty("username", out var username) &&
            root.TryGetProperty("password", out var password))
        {
            var encoded = Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes(
                    $"{username.GetString()}:{password.GetString()}"));
            return $"Basic {encoded}";
        }

        return null;
    }

    private static string? BuildAuthHeader(ResolvedCredential? credential)
    {
        if (credential == null) return null;

        return credential.Type switch
        {
            CredentialType.BearerToken => $"Bearer {credential.Token}",
            CredentialType.BasicAuth => $"Basic {Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes($"{credential.Username}:{credential.Password}"))}",
            _ => null
        };
    }

    private static async Task<string> TruncateResponseBody(HttpResponseMessage response, CancellationToken ct)
    {
        var body = await response.Content.ReadAsStringAsync(ct);
        return body.Length > 500 ? body[..500] + "..." : body;
    }

    private async Task<RepoInfo?> ExtractRepoInfo(
        HttpResponseMessage response,
        GitProvider provider,
        CancellationToken ct)
    {
        try
        {
            var json = await response.Content.ReadAsStringAsync(ct);
            using var doc = JsonDocument.Parse(json);
            var root = doc.RootElement;

            return provider switch
            {
                GitProvider.GitHub => new RepoInfo
                {
                    DefaultBranch = root.TryGetProperty("default_branch", out var db)
                        ? db.GetString() ?? "main"
                        : "main",
                    IsPrivate = root.TryGetProperty("private", out var priv) && priv.GetBoolean()
                },
                GitProvider.GitLab => new RepoInfo
                {
                    DefaultBranch = root.TryGetProperty("default_branch", out var db)
                        ? db.GetString() ?? "main"
                        : "main",
                    IsPrivate = root.TryGetProperty("visibility", out var vis)
                        && vis.GetString() == "private"
                },
                _ => null
            };
        }
        catch
        {
            return null;
        }
    }

    private sealed record RepoInfo
    {
        public string DefaultBranch { get; init; } = "main";
        public bool IsPrivate { get; init; }
    }
}

@@ -0,0 +1,231 @@
using System.Net;
using System.Net.Http.Headers;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;

namespace StellaOps.Scanner.Sources.ConnectionTesters;

/// <summary>
/// Tests connection to container registries for Zastava webhook sources.
/// </summary>
public sealed class ZastavaConnectionTester : ISourceTypeConnectionTester
{
    private readonly IHttpClientFactory _httpClientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ILogger<ZastavaConnectionTester> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Zastava;

    public ZastavaConnectionTester(
        IHttpClientFactory httpClientFactory,
        ICredentialResolver credentialResolver,
        ILogger<ZastavaConnectionTester> logger)
    {
        _httpClientFactory = httpClientFactory;
        _credentialResolver = credentialResolver;
        _logger = logger;
    }

    public async Task<ConnectionTestResult> TestAsync(
        SbomSource source,
        JsonDocument? overrideCredentials,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<ZastavaSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration format",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        var client = _httpClientFactory.CreateClient("SourceConnectionTest");
        client.Timeout = TimeSpan.FromSeconds(30);

        // Get credentials
        string? authHeader = null;
        if (overrideCredentials != null)
        {
            authHeader = ExtractAuthFromTestCredentials(overrideCredentials);
        }
        else if (!string.IsNullOrEmpty(source.AuthRef))
        {
            var creds = await _credentialResolver.ResolveAsync(source.AuthRef, ct);
            authHeader = BuildAuthHeader(creds);
        }

        if (authHeader != null)
        {
            client.DefaultRequestHeaders.TryAddWithoutValidation("Authorization", authHeader);
        }

        try
        {
            var testUrl = BuildRegistryTestUrl(config);
            var response = await client.GetAsync(testUrl, ct);

            var details = new Dictionary<string, object>
            {
                ["registryType"] = config.RegistryType.ToString(),
                ["registryUrl"] = config.RegistryUrl,
                ["statusCode"] = (int)response.StatusCode
            };

            if (response.IsSuccessStatusCode)
            {
                return new ConnectionTestResult
                {
                    Success = true,
                    Message = "Successfully connected to registry",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                };
            }

            // Handle specific error codes
            details["responseBody"] = await TruncateResponseBody(response, ct);

            return response.StatusCode switch
            {
                HttpStatusCode.Unauthorized => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Authentication failed - check credentials",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                HttpStatusCode.Forbidden => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Access denied - insufficient permissions",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                HttpStatusCode.NotFound => new ConnectionTestResult
                {
                    Success = false,
                    Message = "Registry endpoint not found - check URL",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                },
                _ => new ConnectionTestResult
                {
                    Success = false,
                    Message = $"Registry returned {response.StatusCode}",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = details
                }
            };
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "HTTP error testing Zastava connection to {Url}", config.RegistryUrl);
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["registryUrl"] = config.RegistryUrl,
                    ["errorType"] = "HttpRequestException"
                }
            };
        }
        catch (TaskCanceledException) when (!ct.IsCancellationRequested)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Connection timed out",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["registryUrl"] = config.RegistryUrl,
                    ["errorType"] = "Timeout"
                }
            };
        }
    }

    private static string BuildRegistryTestUrl(ZastavaSourceConfig config)
    {
        var baseUrl = config.RegistryUrl.TrimEnd('/');

        return config.RegistryType switch
        {
            // Docker Registry V2 API
            RegistryType.DockerHub => "https://registry-1.docker.io/v2/",
            RegistryType.Harbor or
            RegistryType.Quay or
            RegistryType.Nexus or
            RegistryType.JFrog or
            RegistryType.Custom => $"{baseUrl}/v2/",

            // Cloud provider registries
            RegistryType.Ecr => $"{baseUrl}/v2/", // ECR uses the standard V2 API
            RegistryType.Gcr => $"{baseUrl}/v2/",
            RegistryType.Acr => $"{baseUrl}/v2/",
            RegistryType.Ghcr => "https://ghcr.io/v2/",

            // GitLab container registry
            RegistryType.GitLab => $"{baseUrl}/v2/",

            _ => $"{baseUrl}/v2/"
        };
    }

    private static string? ExtractAuthFromTestCredentials(JsonDocument credentials)
    {
        var root = credentials.RootElement;

        // Support various credential formats
        if (root.TryGetProperty("token", out var token))
        {
            return $"Bearer {token.GetString()}";
        }

        if (root.TryGetProperty("username", out var username) &&
            root.TryGetProperty("password", out var password))
        {
            var encoded = Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes(
                    $"{username.GetString()}:{password.GetString()}"));
            return $"Basic {encoded}";
        }

        return null;
    }

    private static string? BuildAuthHeader(ResolvedCredential? credential)
    {
        if (credential == null) return null;

        return credential.Type switch
        {
            CredentialType.BearerToken => $"Bearer {credential.Token}",
            CredentialType.BasicAuth => $"Basic {Convert.ToBase64String(
                System.Text.Encoding.UTF8.GetBytes($"{credential.Username}:{credential.Password}"))}",
            _ => null
        };
    }

    private static async Task<string> TruncateResponseBody(HttpResponseMessage response, CancellationToken ct)
    {
        var body = await response.Content.ReadAsStringAsync(ct);
        return body.Length > 500 ? body[..500] + "..." : body;
    }
}

@@ -105,6 +105,9 @@ public sealed record ListSourcesRequest
    /// <summary>Search term (matches name, description).</summary>
    public string? Search { get; init; }

    /// <summary>Filter by name contains (case-insensitive).</summary>
    public string? NameContains { get; init; }

    /// <summary>Page size.</summary>
    public int Limit { get; init; } = 25;

@@ -163,22 +166,7 @@ public sealed record TestConnectionRequest
    public string? AuthRef { get; init; }

    /// <summary>Inline credentials for testing (not stored).</summary>
    public TestCredentials? TestCredentials { get; init; }
}

/// <summary>
/// Inline credentials for connection testing.
/// </summary>
public sealed record TestCredentials
{
    /// <summary>Username (registry auth, git).</summary>
    public string? Username { get; init; }

    /// <summary>Password or token.</summary>
    public string? Password { get; init; }

    /// <summary>SSH private key (git).</summary>
    public string? SshKey { get; init; }
    public JsonDocument? TestCredentials { get; init; }
}

// =============================================================================

@@ -310,19 +298,23 @@ public sealed record ConnectionTestResult
    public required bool Success { get; init; }
    public string? Message { get; init; }
    public string? ErrorCode { get; init; }
    public DateTimeOffset TestedAt { get; init; } = DateTimeOffset.UtcNow;
    public List<ConnectionTestCheck> Checks { get; init; } = [];
    public Dictionary<string, object>? Details { get; init; }

    public static ConnectionTestResult Succeeded(string? message = null) => new()
    {
        Success = true,
        Message = message ?? "Connection successful"
        Message = message ?? "Connection successful",
        TestedAt = DateTimeOffset.UtcNow
    };

    public static ConnectionTestResult Failed(string message, string? errorCode = null) => new()
    {
        Success = false,
        Message = message,
        ErrorCode = errorCode
        ErrorCode = errorCode,
        TestedAt = DateTimeOffset.UtcNow
    };
}

@@ -0,0 +1,126 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.ConnectionTesters;
using StellaOps.Scanner.Sources.Handlers;
using StellaOps.Scanner.Sources.Handlers.Cli;
using StellaOps.Scanner.Sources.Handlers.Docker;
using StellaOps.Scanner.Sources.Handlers.Git;
using StellaOps.Scanner.Sources.Handlers.Zastava;
using StellaOps.Scanner.Sources.Persistence;
using StellaOps.Scanner.Sources.Scheduling;
using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.DependencyInjection;

/// <summary>
/// Extension methods for registering Scanner.Sources services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds SBOM source management services to the service collection.
    /// </summary>
    public static IServiceCollection AddSbomSources(
        this IServiceCollection services,
        Action<SbomSourcesOptions>? configure = null)
    {
        var options = new SbomSourcesOptions();
        configure?.Invoke(options);

        // Register options
        services.AddSingleton(options);

        // Register core services
        services.AddScoped<ISbomSourceService, SbomSourceService>();
        services.AddScoped<ISourceConfigValidator, SourceConfigValidator>();
        services.AddScoped<ISourceConnectionTester, SourceConnectionTester>();

        // Register repositories
        services.AddScoped<ISbomSourceRepository, SbomSourceRepository>();
        services.AddScoped<ISbomSourceRunRepository, SbomSourceRunRepository>();

        // Register connection testers
        services.AddScoped<ISourceTypeConnectionTester, ZastavaConnectionTester>();
        services.AddScoped<ISourceTypeConnectionTester, DockerConnectionTester>();
        services.AddScoped<ISourceTypeConnectionTester, GitConnectionTester>();
        services.AddScoped<ISourceTypeConnectionTester, CliConnectionTester>();

        // Register source type handlers
        services.AddScoped<ISourceTypeHandler, ZastavaSourceHandler>();
        services.AddScoped<ISourceTypeHandler, DockerSourceHandler>();
        services.AddScoped<ISourceTypeHandler, GitSourceHandler>();
        services.AddScoped<ISourceTypeHandler, CliSourceHandler>();

        // Register trigger dispatcher
        services.AddScoped<ISourceTriggerDispatcher, SourceTriggerDispatcher>();

        // Register image discovery service
        services.AddSingleton<IImageDiscoveryService, ImageDiscoveryService>();

        // Register HTTP client for connection testing
        services.AddHttpClient("SourceConnectionTest", client =>
        {
            client.DefaultRequestHeaders.Add("User-Agent", "StellaOps-SourceConnectionTester/1.0");
            client.Timeout = TimeSpan.FromSeconds(30);
        });

        return services;
    }

    /// <summary>
    /// Adds the source scheduler background service.
    /// </summary>
    public static IServiceCollection AddSbomSourceScheduler(
        this IServiceCollection services,
        Action<SourceSchedulerOptions>? configure = null)
    {
        services.Configure<SourceSchedulerOptions>(opt =>
        {
            configure?.Invoke(opt);
        });

        services.TryAddSingleton(TimeProvider.System);
        services.AddHostedService<SourceSchedulerHostedService>();

        return services;
    }

    /// <summary>
    /// Adds a custom credential resolver for SBOM sources.
    /// </summary>
    public static IServiceCollection AddSbomSourceCredentialResolver<TResolver>(
        this IServiceCollection services)
        where TResolver : class, ICredentialResolver
    {
        services.AddScoped<ICredentialResolver, TResolver>();
        return services;
    }
}

/// <summary>
/// Options for SBOM source management.
/// </summary>
public sealed class SbomSourcesOptions
{
    /// <summary>
    /// Default timeout for connection tests in seconds.
    /// </summary>
    public int ConnectionTestTimeoutSeconds { get; set; } = 30;

    /// <summary>
    /// Maximum number of runs to retain per source.
    /// </summary>
    public int MaxRunsPerSource { get; set; } = 1000;

    /// <summary>
    /// Whether to enable connection test caching.
    /// </summary>
    public bool EnableConnectionTestCaching { get; set; } = true;

    /// <summary>
    /// Connection test cache duration in minutes.
    /// </summary>
    public int ConnectionTestCacheMinutes { get; set; } = 5;
}
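
// Example host wiring for the extension methods above; VaultCredentialResolver
// is a hypothetical stand-in for whatever ICredentialResolver implementation
// the host provides:
//
// builder.Services
//     .AddSbomSources(opt => opt.ConnectionTestTimeoutSeconds = 15)
//     .AddSbomSourceScheduler()
//     .AddSbomSourceCredentialResolver<VaultCredentialResolver>();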
|
||||
@@ -0,0 +1,358 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Sources.Configuration;
|
||||
using StellaOps.Scanner.Sources.Contracts;
|
||||
using StellaOps.Scanner.Sources.Domain;
|
||||
using StellaOps.Scanner.Sources.Services;
|
||||
using StellaOps.Scanner.Sources.Triggers;
|
||||
|
||||
namespace StellaOps.Scanner.Sources.Handlers.Cli;
|
||||
|
||||
/// <summary>
|
||||
/// Handler for CLI (external submission) sources.
|
||||
/// Receives SBOM uploads from CI/CD pipelines via the CLI tool.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// CLI sources are passive - they don't discover targets but receive
|
||||
/// submissions from external systems. The handler validates submissions
|
||||
/// against the configured rules.
|
||||
/// </remarks>
|
||||
public sealed class CliSourceHandler : ISourceTypeHandler
|
||||
{
|
||||
private readonly ISourceConfigValidator _configValidator;
|
||||
private readonly ILogger<CliSourceHandler> _logger;
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
public SbomSourceType SourceType => SbomSourceType.Cli;
|
||||
public bool SupportsWebhooks => false;
|
||||
public bool SupportsScheduling => false;
|
||||
public int MaxConcurrentTargets => 100;
|
||||
|
||||
public CliSourceHandler(
|
||||
ISourceConfigValidator configValidator,
|
||||
ILogger<CliSourceHandler> logger)
|
||||
{
|
||||
_configValidator = configValidator;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// CLI sources don't discover targets - submissions come via API.
|
||||
/// This method returns an empty list for scheduled/manual triggers.
|
||||
/// For submissions, the target is created from the submission metadata.
|
||||
/// </summary>
|
||||
public Task<IReadOnlyList<ScanTarget>> DiscoverTargetsAsync(
|
||||
SbomSource source,
|
||||
TriggerContext context,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var config = source.Configuration.Deserialize<CliSourceConfig>(JsonOptions);
|
||||
if (config == null)
|
||||
{
|
||||
_logger.LogWarning("Invalid configuration for source {SourceId}", source.SourceId);
|
||||
return Task.FromResult<IReadOnlyList<ScanTarget>>([]);
|
||||
}
|
||||
|
||||
// CLI sources only process submissions via the SubmissionContext
|
||||
if (context.Metadata.TryGetValue("submissionId", out var submissionId))
|
||||
{
|
||||
// Create target from submission metadata
|
||||
var target = new ScanTarget
|
||||
{
|
||||
Reference = context.Metadata.TryGetValue("reference", out var refValue) ? refValue : submissionId,
|
||||
Metadata = new Dictionary<string, string>(context.Metadata)
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
"Created target from CLI submission {SubmissionId} for source {SourceId}",
|
||||
submissionId, source.SourceId);
|
||||
|
||||
return Task.FromResult<IReadOnlyList<ScanTarget>>([target]);
|
||||
}
|
||||
|
||||
// For scheduled/manual triggers, CLI sources have nothing to discover
|
||||
_logger.LogDebug(
|
||||
"CLI source {SourceId} has no targets to discover for trigger {Trigger}",
|
||||
source.SourceId, context.Trigger);
|
||||
|
||||
return Task.FromResult<IReadOnlyList<ScanTarget>>([]);
|
||||
}
|
||||
|
||||
public ConfigValidationResult ValidateConfiguration(JsonDocument configuration)
|
||||
{
|
||||
return _configValidator.Validate(SbomSourceType.Cli, configuration);
|
||||
}
|
||||
|
||||
    public Task<ConnectionTestResult> TestConnectionAsync(
        SbomSource source,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<CliSourceConfig>(JsonOptions);
        if (config == null)
        {
            return Task.FromResult(new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration",
                TestedAt = DateTimeOffset.UtcNow
            });
        }

        // CLI sources don't have external connections to test
        // We just validate the configuration
        return Task.FromResult(new ConnectionTestResult
        {
            Success = true,
            Message = "CLI source configuration is valid",
            TestedAt = DateTimeOffset.UtcNow,
            Details = new Dictionary<string, object>
            {
                ["allowedTools"] = config.AllowedTools,
                ["allowedFormats"] = config.Validation.AllowedFormats.Select(f => f.ToString()).ToArray(),
                ["requireSignedSbom"] = config.Validation.RequireSignedSbom,
                ["maxSbomSizeMb"] = config.Validation.MaxSbomSizeBytes / (1024 * 1024)
            }
        });
    }

    /// <summary>
    /// Validate an SBOM submission against the source configuration.
    /// </summary>
    public SubmissionValidationResult ValidateSubmission(
        SbomSource source,
        CliSubmissionRequest submission)
    {
        var config = source.Configuration.Deserialize<CliSourceConfig>(JsonOptions);
        if (config == null)
        {
            return SubmissionValidationResult.Failed("Invalid source configuration");
        }

        var errors = new List<string>();

        // Validate tool
        if (!config.AllowedTools.Contains(submission.Tool, StringComparer.OrdinalIgnoreCase))
        {
            errors.Add($"Tool '{submission.Tool}' is not allowed. Allowed tools: {string.Join(", ", config.AllowedTools)}");
        }

        // Validate CI system if specified
        if (config.AllowedCiSystems is { Length: > 0 } && submission.CiSystem != null)
        {
            if (!config.AllowedCiSystems.Contains(submission.CiSystem, StringComparer.OrdinalIgnoreCase))
            {
                errors.Add($"CI system '{submission.CiSystem}' is not allowed. Allowed systems: {string.Join(", ", config.AllowedCiSystems)}");
            }
        }

        // Validate format
        if (!config.Validation.AllowedFormats.Contains(submission.Format))
        {
            errors.Add($"Format '{submission.Format}' is not allowed. Allowed formats: {string.Join(", ", config.Validation.AllowedFormats)}");
        }

        // Validate size
        if (submission.SbomSizeBytes > config.Validation.MaxSbomSizeBytes)
        {
            var maxMb = config.Validation.MaxSbomSizeBytes / (1024 * 1024);
            var actualMb = submission.SbomSizeBytes / (1024 * 1024);
            errors.Add($"SBOM size ({actualMb} MB) exceeds maximum allowed size ({maxMb} MB)");
        }

        // Validate signature if required
        if (config.Validation.RequireSignedSbom && string.IsNullOrEmpty(submission.Signature))
        {
            errors.Add("Signed SBOM is required but no signature was provided");
        }

        // Validate signer if signature is present
        if (!string.IsNullOrEmpty(submission.Signature) &&
            config.Validation.AllowedSigners is { Length: > 0 })
        {
            if (!config.Validation.AllowedSigners.Contains(submission.SignerFingerprint, StringComparer.OrdinalIgnoreCase))
            {
                errors.Add($"Signer fingerprint '{submission.SignerFingerprint}' is not in the allowed list");
            }
        }

        // Validate attribution requirements
        if (config.Attribution.RequireBuildId && string.IsNullOrEmpty(submission.BuildId))
        {
            errors.Add("Build ID is required");
        }

        if (config.Attribution.RequireRepository && string.IsNullOrEmpty(submission.Repository))
        {
            errors.Add("Repository reference is required");
        }

        if (config.Attribution.RequireCommitSha && string.IsNullOrEmpty(submission.CommitSha))
        {
            errors.Add("Commit SHA is required");
        }

        if (config.Attribution.RequirePipelineId && string.IsNullOrEmpty(submission.PipelineId))
        {
            errors.Add("Pipeline ID is required");
        }

        // Validate repository against allowed patterns
        if (!string.IsNullOrEmpty(submission.Repository) &&
            config.Attribution.AllowedRepositories is { Length: > 0 })
        {
            var repoAllowed = config.Attribution.AllowedRepositories
                .Any(p => MatchesPattern(submission.Repository, p));

            if (!repoAllowed)
            {
                errors.Add($"Repository '{submission.Repository}' is not in the allowed list");
            }
        }

        if (errors.Count > 0)
        {
            return SubmissionValidationResult.Failed(errors);
        }

        return SubmissionValidationResult.Valid();
    }
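
    // Usage sketch (hypothetical ingestion endpoint; "handler", "source" and
    // "request" are illustrative names, not part of this file):
    //
    //   var result = handler.ValidateSubmission(source, request);
    //   if (!result.IsValid)
    //       return Results.BadRequest(new { errors = result.Errors });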

    /// <summary>
    /// Generate a token for CLI authentication to this source.
    /// </summary>
    public CliAuthToken GenerateAuthToken(SbomSource source, TimeSpan validity)
    {
        var tokenBytes = new byte[32];
        RandomNumberGenerator.Fill(tokenBytes);
        var token = Convert.ToBase64String(tokenBytes);

        // Create token hash for storage
        var tokenHash = SHA256.HashData(Encoding.UTF8.GetBytes(token));

        return new CliAuthToken
        {
            Token = token,
            TokenHash = Convert.ToHexString(tokenHash).ToLowerInvariant(),
            SourceId = source.SourceId,
            ExpiresAt = DateTimeOffset.UtcNow.Add(validity),
            CreatedAt = DateTimeOffset.UtcNow
        };
    }
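
    // Verification sketch (assumed consumer side; "storedTokenHash" and
    // "expiresAt" are illustrative names, not part of this file). Only
    // TokenHash is persisted, so a presented raw token is re-hashed and
    // compared against the stored lowercase hex digest:
    //
    //   var presentedHash = Convert.ToHexString(
    //       SHA256.HashData(Encoding.UTF8.GetBytes(presentedToken))).ToLowerInvariant();
    //   var ok = presentedHash == storedTokenHash && DateTimeOffset.UtcNow < expiresAt;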

    private static bool MatchesPattern(string value, string pattern)
    {
        var regexPattern = "^" + Regex.Escape(pattern)
            .Replace("\\*\\*", ".*")
            .Replace("\\*", "[^/]*")
            .Replace("\\?", ".") + "$";

        return Regex.IsMatch(value, regexPattern, RegexOptions.IgnoreCase);
    }
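
    // Behavior sketch for the glob translation above: "**" crosses path
    // segments, "*" stays within one segment, "?" matches a single character.
    //   MatchesPattern("github.com/org/repo", "github.com/org/*") -> true
    //   MatchesPattern("github.com/org/a/b",  "github.com/org/*") -> false ("*" stops at "/")
    //   MatchesPattern("github.com/org/a/b",  "github.com/**")    -> true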
}

/// <summary>
/// Request for CLI SBOM submission.
/// </summary>
public sealed record CliSubmissionRequest
{
    /// <summary>Scanner/tool that generated the SBOM.</summary>
    public required string Tool { get; init; }

    /// <summary>Tool version.</summary>
    public string? ToolVersion { get; init; }

    /// <summary>CI system (e.g., "github-actions", "gitlab-ci").</summary>
    public string? CiSystem { get; init; }

    /// <summary>SBOM format.</summary>
    public required SbomFormat Format { get; init; }

    /// <summary>SBOM format version.</summary>
    public string? FormatVersion { get; init; }

    /// <summary>SBOM size in bytes.</summary>
    public long SbomSizeBytes { get; init; }

    /// <summary>SBOM content hash (for verification).</summary>
    public string? ContentHash { get; init; }

    /// <summary>SBOM signature (if signed).</summary>
    public string? Signature { get; init; }

    /// <summary>Signer key fingerprint.</summary>
    public string? SignerFingerprint { get; init; }

    /// <summary>Build ID.</summary>
    public string? BuildId { get; init; }

    /// <summary>Repository URL.</summary>
    public string? Repository { get; init; }

    /// <summary>Commit SHA.</summary>
    public string? CommitSha { get; init; }

    /// <summary>Branch name.</summary>
    public string? Branch { get; init; }

    /// <summary>Pipeline/workflow ID.</summary>
    public string? PipelineId { get; init; }

    /// <summary>Pipeline/workflow name.</summary>
    public string? PipelineName { get; init; }

    /// <summary>Subject reference (what was scanned).</summary>
    public required string Subject { get; init; }

    /// <summary>Subject digest.</summary>
    public string? SubjectDigest { get; init; }

    /// <summary>Additional metadata.</summary>
    public Dictionary<string, string> Metadata { get; init; } = [];
}

/// <summary>
/// Result of submission validation.
/// </summary>
public sealed record SubmissionValidationResult
{
    public bool IsValid { get; init; }
    public IReadOnlyList<string> Errors { get; init; } = [];

    public static SubmissionValidationResult Valid() =>
        new() { IsValid = true };

    public static SubmissionValidationResult Failed(string error) =>
        new() { IsValid = false, Errors = [error] };

    public static SubmissionValidationResult Failed(IReadOnlyList<string> errors) =>
        new() { IsValid = false, Errors = errors };
}

/// <summary>
/// CLI authentication token.
/// </summary>
public sealed record CliAuthToken
{
    /// <summary>The raw token (only returned once on creation).</summary>
    public required string Token { get; init; }

    /// <summary>Hash of the token (stored in database).</summary>
    public required string TokenHash { get; init; }

    /// <summary>Source this token is for.</summary>
    public Guid SourceId { get; init; }

    /// <summary>When the token expires.</summary>
    public DateTimeOffset ExpiresAt { get; init; }

    /// <summary>When the token was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }
}
@@ -0,0 +1,341 @@
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Handlers.Zastava;
using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.Handlers.Docker;

/// <summary>
/// Handler for Docker (direct image scan) sources.
/// Scans specific images from container registries on schedule or on-demand.
/// </summary>
public sealed class DockerSourceHandler : ISourceTypeHandler
{
    private readonly IRegistryClientFactory _clientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ISourceConfigValidator _configValidator;
    private readonly IImageDiscoveryService _discoveryService;
    private readonly ILogger<DockerSourceHandler> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Docker;
    public bool SupportsWebhooks => false;
    public bool SupportsScheduling => true;
    public int MaxConcurrentTargets => 50;

    public DockerSourceHandler(
        IRegistryClientFactory clientFactory,
        ICredentialResolver credentialResolver,
        ISourceConfigValidator configValidator,
        IImageDiscoveryService discoveryService,
        ILogger<DockerSourceHandler> logger)
    {
        _clientFactory = clientFactory;
        _credentialResolver = credentialResolver;
        _configValidator = configValidator;
        _discoveryService = discoveryService;
        _logger = logger;
    }

    public async Task<IReadOnlyList<ScanTarget>> DiscoverTargetsAsync(
        SbomSource source,
        TriggerContext context,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<DockerSourceConfig>(JsonOptions);
        if (config == null)
        {
            _logger.LogWarning("Invalid configuration for source {SourceId}", source.SourceId);
            return [];
        }

        var credentials = await GetCredentialsAsync(source.AuthRef, ct);
        var registryType = InferRegistryType(config.RegistryUrl);

        using var client = _clientFactory.Create(registryType, config.RegistryUrl, credentials);

        var targets = new List<ScanTarget>();

        foreach (var imageSpec in config.Images)
        {
            try
            {
                var discovered = await DiscoverImageTargetsAsync(
                    client, config, imageSpec, ct);
                targets.AddRange(discovered);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex,
                    "Failed to discover targets for image {Reference}",
                    imageSpec.Reference);
            }
        }

        _logger.LogInformation(
            "Discovered {Count} targets from {ImageCount} image specs for source {SourceId}",
            targets.Count, config.Images.Length, source.SourceId);

        return targets;
    }

    private async Task<IReadOnlyList<ScanTarget>> DiscoverImageTargetsAsync(
        IRegistryClient client,
        DockerSourceConfig config,
        ImageSpec imageSpec,
        CancellationToken ct)
    {
        var targets = new List<ScanTarget>();

        // Parse the reference to get repository and optional tag
        var (repository, tag) = ParseReference(imageSpec.Reference);

        // If the reference has a specific tag and no patterns, just scan that image
        if (tag != null && (imageSpec.TagPatterns == null || imageSpec.TagPatterns.Length == 0))
        {
            var digest = await client.GetDigestAsync(repository, tag, ct);
            targets.Add(new ScanTarget
            {
                Reference = BuildFullReference(config.RegistryUrl, repository, tag),
                Digest = digest,
                Priority = config.ScanOptions.Priority,
                Metadata = new Dictionary<string, string>
                {
                    ["repository"] = repository,
                    ["tag"] = tag,
                    ["registryUrl"] = config.RegistryUrl
                }
            });
            return targets;
        }

        // Discover tags based on patterns
        var tagPatterns = imageSpec.TagPatterns ?? ["*"];
        var allTags = await client.ListTagsAsync(repository, tagPatterns, imageSpec.MaxTags * 2, ct);

        // Filter and sort tags
        var filteredTags = _discoveryService.FilterTags(
            allTags,
            config.Discovery?.ExcludePatterns,
            config.Discovery?.IncludePreRelease ?? false);

        var sortedTags = _discoveryService.SortTags(
            filteredTags,
            config.Discovery?.SortOrder ?? TagSortOrder.SemVerDescending);

        // Apply age filter if specified
        if (imageSpec.MaxAgeHours.HasValue)
        {
            var cutoff = DateTimeOffset.UtcNow.AddHours(-imageSpec.MaxAgeHours.Value);
            sortedTags = sortedTags
                .Where(t => t.LastUpdated == null || t.LastUpdated >= cutoff)
                .ToList();
        }

        // Take the configured number of tags
        var tagsToScan = sortedTags.Take(imageSpec.MaxTags).ToList();

        foreach (var tagInfo in tagsToScan)
        {
            targets.Add(new ScanTarget
            {
                Reference = BuildFullReference(config.RegistryUrl, repository, tagInfo.Name),
                Digest = tagInfo.Digest,
                Priority = config.ScanOptions.Priority,
                Metadata = new Dictionary<string, string>
                {
                    ["repository"] = repository,
                    ["tag"] = tagInfo.Name,
                    ["registryUrl"] = config.RegistryUrl,
                    ["digestPin"] = imageSpec.DigestPin.ToString().ToLowerInvariant()
                }
            });
        }

        return targets;
    }

    public ConfigValidationResult ValidateConfiguration(JsonDocument configuration)
    {
        return _configValidator.Validate(SbomSourceType.Docker, configuration);
    }

    public async Task<ConnectionTestResult> TestConnectionAsync(
        SbomSource source,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<DockerSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        try
        {
            var credentials = await GetCredentialsAsync(source.AuthRef, ct);
            var registryType = InferRegistryType(config.RegistryUrl);
            using var client = _clientFactory.Create(registryType, config.RegistryUrl, credentials);

            var pingSuccess = await client.PingAsync(ct);
            if (!pingSuccess)
            {
                return new ConnectionTestResult
                {
                    Success = false,
                    Message = "Registry ping failed",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = new Dictionary<string, object>
                    {
                        ["registryUrl"] = config.RegistryUrl
                    }
                };
            }

            // Try to get digest for the first image to verify access
            if (config.Images.Length > 0)
            {
                var (repo, tag) = ParseReference(config.Images[0].Reference);
                var digest = await client.GetDigestAsync(repo, tag ?? "latest", ct);

                return new ConnectionTestResult
                {
                    Success = true,
                    Message = "Successfully connected to registry",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = new Dictionary<string, object>
                    {
                        ["registryUrl"] = config.RegistryUrl,
                        ["testImage"] = config.Images[0].Reference,
                        ["imageAccessible"] = digest != null
                    }
                };
            }

            return new ConnectionTestResult
            {
                Success = true,
                Message = "Successfully connected to registry",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["registryUrl"] = config.RegistryUrl
                }
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Connection test failed for source {SourceId}", source.SourceId);
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
    }

    private async Task<RegistryCredentials?> GetCredentialsAsync(string? authRef, CancellationToken ct)
    {
        if (string.IsNullOrEmpty(authRef))
        {
            return null;
        }

        var resolved = await _credentialResolver.ResolveAsync(authRef, ct);
        if (resolved == null)
        {
            return null;
        }

        return resolved.Type switch
        {
            CredentialType.BasicAuth => new RegistryCredentials
            {
                AuthType = RegistryAuthType.Basic,
                Username = resolved.Username,
                Password = resolved.Password
            },
            CredentialType.BearerToken => new RegistryCredentials
            {
                AuthType = RegistryAuthType.Token,
                Token = resolved.Token
            },
            CredentialType.AwsCredentials => new RegistryCredentials
            {
                AuthType = RegistryAuthType.AwsEcr,
                AwsAccessKey = resolved.Properties?.GetValueOrDefault("accessKey"),
                AwsSecretKey = resolved.Properties?.GetValueOrDefault("secretKey"),
                AwsRegion = resolved.Properties?.GetValueOrDefault("region")
            },
            _ => null
        };
    }

    private static RegistryType InferRegistryType(string registryUrl)
    {
        var host = new Uri(registryUrl).Host.ToLowerInvariant();

        return host switch
        {
            _ when host.Contains("docker.io") || host.Contains("docker.com") => RegistryType.DockerHub,
            _ when host.Contains("ecr.") && host.Contains("amazonaws.com") => RegistryType.Ecr,
            _ when host.Contains("gcr.io") || host.Contains("pkg.dev") => RegistryType.Gcr,
            _ when host.Contains("azurecr.io") => RegistryType.Acr,
            _ when host.Contains("ghcr.io") => RegistryType.Ghcr,
            _ when host.Contains("quay.io") => RegistryType.Quay,
            _ when host.Contains("jfrog.io") || host.Contains("artifactory") => RegistryType.Artifactory,
            _ => RegistryType.Generic
        };
    }

    private static (string Repository, string? Tag) ParseReference(string reference)
    {
        // Handle digest references
        if (reference.Contains('@'))
        {
            var parts = reference.Split('@', 2);
            return (parts[0], null);
        }

        // Handle tag references; a colon whose suffix contains '/' belongs to
        // a registry port (e.g. "localhost:5000/app"), not a tag separator
        if (reference.Contains(':'))
        {
            var lastColon = reference.LastIndexOf(':');
            var candidateTag = reference[(lastColon + 1)..];
            if (!candidateTag.Contains('/'))
            {
                return (reference[..lastColon], candidateTag);
            }
        }

        return (reference, null);
    }
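
    // Behavior sketch for the parser above:
    //   ParseReference("nginx:1.25")             -> ("nginx", "1.25")
    //   ParseReference("nginx@sha256:abc...")    -> ("nginx", null)              // digest-pinned
    //   ParseReference("localhost:5000/app")     -> ("localhost:5000/app", null) // port, not tag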

    private static string BuildFullReference(string registryUrl, string repository, string tag)
    {
        var host = new Uri(registryUrl).Host;

        // Docker Hub special case
        if (host.Contains("docker.io") || host.Contains("docker.com"))
        {
            if (!repository.Contains('/'))
            {
                repository = $"library/{repository}";
            }
            return $"{repository}:{tag}";
        }

        return $"{host}/{repository}:{tag}";
    }
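
    // Behavior sketch:
    //   BuildFullReference("https://registry-1.docker.io", "nginx", "1.25") -> "library/nginx:1.25"
    //   BuildFullReference("https://ghcr.io", "acme/app", "v2")             -> "ghcr.io/acme/app:v2"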
}
@@ -0,0 +1,206 @@
using System.Text.RegularExpressions;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Handlers.Zastava;

namespace StellaOps.Scanner.Sources.Handlers.Docker;

/// <summary>
/// Service for discovering and filtering container image tags.
/// </summary>
public interface IImageDiscoveryService
{
    /// <summary>
    /// Filter tags based on exclusion patterns and pre-release settings.
    /// </summary>
    IReadOnlyList<RegistryTag> FilterTags(
        IReadOnlyList<RegistryTag> tags,
        string[]? excludePatterns,
        bool includePreRelease);

    /// <summary>
    /// Sort tags according to the specified sort order.
    /// </summary>
    IReadOnlyList<RegistryTag> SortTags(
        IReadOnlyList<RegistryTag> tags,
        TagSortOrder sortOrder);

    /// <summary>
    /// Parse a semantic version from a tag name.
    /// </summary>
    SemVer? ParseSemVer(string tag);
}

/// <summary>
/// Default implementation of tag discovery and filtering.
/// </summary>
public sealed class ImageDiscoveryService : IImageDiscoveryService
{
    private static readonly Regex SemVerRegex = new(
        @"^v?(?<major>\d+)\.(?<minor>\d+)\.(?<patch>\d+)" +
        @"(?:-(?<prerelease>[a-zA-Z0-9.-]+))?" +
        @"(?:\+(?<metadata>[a-zA-Z0-9.-]+))?$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);

    private static readonly Regex PreReleasePattern = new(
        @"(?:alpha|beta|rc|pre|preview|dev|snapshot|canary|nightly)",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);

    public IReadOnlyList<RegistryTag> FilterTags(
        IReadOnlyList<RegistryTag> tags,
        string[]? excludePatterns,
        bool includePreRelease)
    {
        var filtered = tags.AsEnumerable();

        // Apply exclusion patterns
        if (excludePatterns is { Length: > 0 })
        {
            var regexPatterns = excludePatterns
                .Select(p => new Regex(
                    "^" + Regex.Escape(p).Replace("\\*", ".*").Replace("\\?", ".") + "$",
                    RegexOptions.IgnoreCase))
                .ToList();

            filtered = filtered.Where(t =>
                !regexPatterns.Any(r => r.IsMatch(t.Name)));
        }

        // Filter pre-release tags if not included
        if (!includePreRelease)
        {
            filtered = filtered.Where(t => !IsPreRelease(t.Name));
        }

        return filtered.ToList();
    }

    public IReadOnlyList<RegistryTag> SortTags(
        IReadOnlyList<RegistryTag> tags,
        TagSortOrder sortOrder)
    {
        return sortOrder switch
        {
            TagSortOrder.SemVerDescending => tags
                .Select(t => (Tag: t, SemVer: ParseSemVer(t.Name)))
                .OrderByDescending(x => x.SemVer?.Major ?? 0)
                .ThenByDescending(x => x.SemVer?.Minor ?? 0)
                .ThenByDescending(x => x.SemVer?.Patch ?? 0)
                .ThenBy(x => x.SemVer?.PreRelease ?? "")
                .ThenByDescending(x => x.Tag.Name)
                .Select(x => x.Tag)
                .ToList(),

            TagSortOrder.SemVerAscending => tags
                .Select(t => (Tag: t, SemVer: ParseSemVer(t.Name)))
                .OrderBy(x => x.SemVer?.Major ?? int.MaxValue)
                .ThenBy(x => x.SemVer?.Minor ?? int.MaxValue)
                .ThenBy(x => x.SemVer?.Patch ?? int.MaxValue)
                .ThenByDescending(x => x.SemVer?.PreRelease ?? "")
                .ThenBy(x => x.Tag.Name)
                .Select(x => x.Tag)
                .ToList(),

            TagSortOrder.AlphaDescending => tags
                .OrderByDescending(t => t.Name)
                .ToList(),

            TagSortOrder.AlphaAscending => tags
                .OrderBy(t => t.Name)
                .ToList(),

            TagSortOrder.DateDescending => tags
                .OrderByDescending(t => t.LastUpdated ?? DateTimeOffset.MinValue)
                .ThenByDescending(t => t.Name)
                .ToList(),

            TagSortOrder.DateAscending => tags
                .OrderBy(t => t.LastUpdated ?? DateTimeOffset.MaxValue)
                .ThenBy(t => t.Name)
                .ToList(),

            _ => tags.ToList()
        };
    }
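
    // Example (hypothetical tag list, SemVerDescending):
    //   ["v1.2.0", "v1.10.0", "v1.10.0-rc.1", "latest"]
    //   -> ["v1.10.0", "v1.10.0-rc.1", "v1.2.0", "latest"]
    // Non-semver names fall back to 0.0.0 and therefore sort after real versions.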

    public SemVer? ParseSemVer(string tag)
    {
        var match = SemVerRegex.Match(tag);
        if (!match.Success)
        {
            return null;
        }

        return new SemVer
        {
            Major = int.Parse(match.Groups["major"].Value),
            Minor = int.Parse(match.Groups["minor"].Value),
            Patch = int.Parse(match.Groups["patch"].Value),
            PreRelease = match.Groups["prerelease"].Success
                ? match.Groups["prerelease"].Value
                : null,
            Metadata = match.Groups["metadata"].Success
                ? match.Groups["metadata"].Value
                : null
        };
    }
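
    // Example: ParseSemVer("v2.3.1-rc.2+build.7") yields
    //   Major=2, Minor=3, Patch=1, PreRelease="rc.2", Metadata="build.7";
    // ParseSemVer("latest") yields null.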

    private static bool IsPreRelease(string tagName)
    {
        // Check common pre-release indicators
        if (PreReleasePattern.IsMatch(tagName))
        {
            return true;
        }

        // Also check for a semver pre-release component; match the static
        // regex directly instead of allocating a service instance
        var match = SemVerRegex.Match(tagName);
        return match.Success && match.Groups["prerelease"].Success;
    }
}

/// <summary>
/// Represents a parsed semantic version.
/// </summary>
public sealed record SemVer : IComparable<SemVer>
{
    public int Major { get; init; }
    public int Minor { get; init; }
    public int Patch { get; init; }
    public string? PreRelease { get; init; }
    public string? Metadata { get; init; }

    public int CompareTo(SemVer? other)
    {
        if (other is null) return 1;

        var majorCompare = Major.CompareTo(other.Major);
        if (majorCompare != 0) return majorCompare;

        var minorCompare = Minor.CompareTo(other.Minor);
        if (minorCompare != 0) return minorCompare;

        var patchCompare = Patch.CompareTo(other.Patch);
        if (patchCompare != 0) return patchCompare;

        // Pre-release versions have lower precedence than release versions
        if (PreRelease is null && other.PreRelease is not null) return 1;
        if (PreRelease is not null && other.PreRelease is null) return -1;
        if (PreRelease is null && other.PreRelease is null) return 0;

        return string.Compare(PreRelease, other.PreRelease, StringComparison.Ordinal);
    }
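
    // Resulting ascending order: 1.0.0-alpha < 1.0.0-beta < 1.0.0 < 1.0.1.
    // Note: ordinal comparison of the pre-release string is a simplification;
    // full SemVer 2.0 precedence compares dot-separated identifiers numerically
    // (e.g. rc.9 vs rc.10), which this implementation does not attempt.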

    public override string ToString()
    {
        var result = $"{Major}.{Minor}.{Patch}";
        if (!string.IsNullOrEmpty(PreRelease))
        {
            result += $"-{PreRelease}";
        }
        if (!string.IsNullOrEmpty(Metadata))
        {
            result += $"+{Metadata}";
        }
        return result;
    }
}
@@ -0,0 +1,511 @@
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.Handlers.Git;

/// <summary>
/// Handler for Git (repository) sources.
/// Scans source code repositories for dependencies and vulnerabilities.
/// </summary>
public sealed class GitSourceHandler : ISourceTypeHandler, IWebhookCapableHandler
{
    private readonly IGitClientFactory _gitClientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ISourceConfigValidator _configValidator;
    private readonly ILogger<GitSourceHandler> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Git;
    public bool SupportsWebhooks => true;
    public bool SupportsScheduling => true;
    public int MaxConcurrentTargets => 10;

    public GitSourceHandler(
        IGitClientFactory gitClientFactory,
        ICredentialResolver credentialResolver,
        ISourceConfigValidator configValidator,
        ILogger<GitSourceHandler> logger)
    {
        _gitClientFactory = gitClientFactory;
        _credentialResolver = credentialResolver;
        _configValidator = configValidator;
        _logger = logger;
    }

    public async Task<IReadOnlyList<ScanTarget>> DiscoverTargetsAsync(
        SbomSource source,
        TriggerContext context,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<GitSourceConfig>(JsonOptions);
        if (config == null)
        {
            _logger.LogWarning("Invalid configuration for source {SourceId}", source.SourceId);
            return [];
        }

        // For webhook triggers, extract target from payload
        if (context.Trigger == SbomSourceRunTrigger.Webhook)
        {
            if (context.WebhookPayload != null)
            {
                var payloadInfo = ParseWebhookPayload(context.WebhookPayload);

                // Check if it matches configured triggers and branch filters
                if (!ShouldTrigger(payloadInfo, config))
                {
                    _logger.LogInformation(
                        "Webhook payload does not match triggers for source {SourceId}",
                        source.SourceId);
                    return [];
                }

                return
                [
                    new ScanTarget
                    {
                        Reference = BuildReference(config.RepositoryUrl, payloadInfo.Branch ?? payloadInfo.Reference),
                        Metadata = new Dictionary<string, string>
                        {
                            ["repository"] = config.RepositoryUrl,
                            ["branch"] = payloadInfo.Branch ?? "",
                            ["commit"] = payloadInfo.CommitSha ?? "",
                            ["eventType"] = payloadInfo.EventType,
                            ["actor"] = payloadInfo.Actor ?? "unknown"
                        }
                    }
                ];
            }
        }

        // For scheduled/manual triggers, discover branches to scan
        return await DiscoverBranchTargetsAsync(source, config, ct);
    }

    private async Task<IReadOnlyList<ScanTarget>> DiscoverBranchTargetsAsync(
        SbomSource source,
        GitSourceConfig config,
        CancellationToken ct)
    {
        var credentials = await GetCredentialsAsync(source.AuthRef, config.AuthMethod, ct);
        using var client = _gitClientFactory.Create(config.Provider, config.RepositoryUrl, credentials);

        var branches = await client.ListBranchesAsync(ct);
        var targets = new List<ScanTarget>();

        foreach (var branch in branches)
        {
            // Check inclusion patterns
            var included = config.Branches.Include
                .Any(p => MatchesPattern(branch.Name, p));

            if (!included)
            {
                continue;
            }

            // Check exclusion patterns
            var excluded = config.Branches.Exclude?
                .Any(p => MatchesPattern(branch.Name, p)) ?? false;

            if (excluded)
            {
                continue;
            }

            targets.Add(new ScanTarget
            {
                Reference = BuildReference(config.RepositoryUrl, branch.Name),
                Metadata = new Dictionary<string, string>
                {
                    ["repository"] = config.RepositoryUrl,
                    ["branch"] = branch.Name,
                    ["commit"] = branch.HeadCommit ?? "",
                    ["eventType"] = "scheduled"
                }
            });
        }

        _logger.LogInformation(
            "Discovered {Count} branch targets for source {SourceId}",
            targets.Count, source.SourceId);

        return targets;
    }

    public ConfigValidationResult ValidateConfiguration(JsonDocument configuration)
    {
        return _configValidator.Validate(SbomSourceType.Git, configuration);
    }

    public async Task<ConnectionTestResult> TestConnectionAsync(
        SbomSource source,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<GitSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        try
        {
            var credentials = await GetCredentialsAsync(source.AuthRef, config.AuthMethod, ct);
            using var client = _gitClientFactory.Create(config.Provider, config.RepositoryUrl, credentials);

            var repoInfo = await client.GetRepositoryInfoAsync(ct);
            if (repoInfo == null)
            {
                return new ConnectionTestResult
                {
                    Success = false,
                    Message = "Repository not found or inaccessible",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = new Dictionary<string, object>
                    {
                        ["repositoryUrl"] = config.RepositoryUrl,
                        ["provider"] = config.Provider.ToString()
                    }
                };
            }

            return new ConnectionTestResult
            {
                Success = true,
                Message = "Successfully connected to repository",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["repositoryUrl"] = config.RepositoryUrl,
                    ["provider"] = config.Provider.ToString(),
                    ["defaultBranch"] = repoInfo.DefaultBranch ?? "",
                    ["sizeKb"] = repoInfo.SizeKb
                }
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Connection test failed for source {SourceId}", source.SourceId);
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
    }

    public bool VerifyWebhookSignature(byte[] payload, string signature, string secret)
    {
        if (string.IsNullOrEmpty(signature) || string.IsNullOrEmpty(secret))
        {
            return false;
        }

        // GitHub uses HMAC-SHA256 with "sha256=" prefix
        if (signature.StartsWith("sha256=", StringComparison.OrdinalIgnoreCase))
        {
            return VerifyHmacSha256(payload, signature[7..], secret);
        }

        // GitHub legacy uses HMAC-SHA1 with "sha1=" prefix
        if (signature.StartsWith("sha1=", StringComparison.OrdinalIgnoreCase))
        {
            return VerifyHmacSha1(payload, signature[5..], secret);
        }

        // GitLab uses X-Gitlab-Token header (direct secret comparison)
        if (!signature.Contains('='))
        {
            return string.Equals(signature, secret, StringComparison.Ordinal);
        }

        return false;
    }
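
    // Caller sketch (hypothetical webhook endpoint; the header names are the
    // documented GitHub/GitLab conventions, the endpoint itself is illustrative):
    //
    //   var sig = request.Headers["X-Hub-Signature-256"].FirstOrDefault()    // GitHub
    //          ?? request.Headers["X-Gitlab-Token"].FirstOrDefault() ?? "";  // GitLab
    //   if (!handler.VerifyWebhookSignature(rawBody, sig, source.WebhookSecret))
    //       return Results.Unauthorized();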

    public WebhookPayloadInfo ParseWebhookPayload(JsonDocument payload)
    {
        var root = payload.RootElement;

        // GitHub push event
        if (root.TryGetProperty("ref", out var refProp) &&
            root.TryGetProperty("repository", out var ghRepo))
        {
            var refValue = refProp.GetString() ?? "";
            var branch = refValue.StartsWith("refs/heads/")
                ? refValue[11..]
                : refValue.StartsWith("refs/tags/")
                    ? refValue[10..]
                    : refValue;

            var isTag = refValue.StartsWith("refs/tags/");

            return new WebhookPayloadInfo
            {
                EventType = isTag ? "tag" : "push",
                Reference = ghRepo.TryGetProperty("full_name", out var fullName)
                    ? fullName.GetString()!
                    : "",
                Branch = branch,
                CommitSha = root.TryGetProperty("after", out var after)
                    ? after.GetString()
                    : null,
                Actor = root.TryGetProperty("sender", out var sender) &&
                        sender.TryGetProperty("login", out var login)
                    ? login.GetString()
                    : null,
                Timestamp = DateTimeOffset.UtcNow
            };
        }

        // GitHub pull request event
        if (root.TryGetProperty("action", out var action) &&
            root.TryGetProperty("pull_request", out var pr))
        {
            // Guard "head" once; calling TryGetProperty on an undefined
            // JsonElement throws, so never dereference it unconditionally
            var hasHead = pr.TryGetProperty("head", out var head);

            return new WebhookPayloadInfo
            {
                EventType = "pull_request",
                Reference = root.TryGetProperty("repository", out var prRepo) &&
                            prRepo.TryGetProperty("full_name", out var prFullName)
                    ? prFullName.GetString()!
                    : "",
                Branch = hasHead && head.TryGetProperty("ref", out var headRef)
                    ? headRef.GetString()
                    : null,
                CommitSha = hasHead && head.TryGetProperty("sha", out var sha)
                    ? sha.GetString()
                    : null,
                Actor = pr.TryGetProperty("user", out var user) &&
                        user.TryGetProperty("login", out var prLogin)
                    ? prLogin.GetString()
                    : null,
                Metadata = new Dictionary<string, string>
                {
                    ["action"] = action.GetString() ?? "",
                    ["prNumber"] = pr.TryGetProperty("number", out var num)
                        ? num.GetInt32().ToString()
                        : ""
                },
                Timestamp = DateTimeOffset.UtcNow
            };
        }

        // GitLab push event
        if (root.TryGetProperty("object_kind", out var objectKind))
        {
            var kind = objectKind.GetString();

            if (kind == "push")
            {
                return new WebhookPayloadInfo
                {
                    EventType = "push",
                    Reference = root.TryGetProperty("project", out var project) &&
                                project.TryGetProperty("path_with_namespace", out var path)
                        ? path.GetString()!
                        : "",
                    Branch = root.TryGetProperty("ref", out var glRef)
                        ? glRef.GetString()?.Replace("refs/heads/", "") ?? ""
                        : null,
                    CommitSha = root.TryGetProperty("after", out var glAfter)
                        ? glAfter.GetString()
                        : null,
                    Actor = root.TryGetProperty("user_name", out var userName)
                        ? userName.GetString()
                        : null,
                    Timestamp = DateTimeOffset.UtcNow
                };
            }

            if (kind == "merge_request")
            {
                // Guard "object_attributes" the same way; reading properties
                // off a default JsonElement throws
                var hasAttrs = root.TryGetProperty("object_attributes", out var mrAttrs);
                return new WebhookPayloadInfo
                {
                    EventType = "pull_request",
                    Reference = root.TryGetProperty("project", out var mrProject) &&
                                mrProject.TryGetProperty("path_with_namespace", out var mrPath)
                        ? mrPath.GetString()!
                        : "",
                    Branch = hasAttrs && mrAttrs.TryGetProperty("source_branch", out var srcBranch)
                        ? srcBranch.GetString()
                        : null,
                    CommitSha = hasAttrs && mrAttrs.TryGetProperty("last_commit", out var lastCommit) &&
                                lastCommit.TryGetProperty("id", out var commitId)
                        ? commitId.GetString()
                        : null,
                    Actor = root.TryGetProperty("user", out var glUser) &&
                            glUser.TryGetProperty("username", out var glUsername)
                        ? glUsername.GetString()
                        : null,
                    Metadata = new Dictionary<string, string>
                    {
                        ["action"] = hasAttrs && mrAttrs.TryGetProperty("action", out var mrAction)
                            ? mrAction.GetString() ?? ""
                            : ""
                    },
                    Timestamp = DateTimeOffset.UtcNow
                };
            }
        }

        _logger.LogWarning("Unable to parse Git webhook payload format");
        return new WebhookPayloadInfo
        {
            EventType = "unknown",
            Reference = "",
            Timestamp = DateTimeOffset.UtcNow
        };
    }

    private bool ShouldTrigger(WebhookPayloadInfo payload, GitSourceConfig config)
    {
        // Check event type against configured triggers
        switch (payload.EventType)
        {
            case "push":
                if (!config.Triggers.OnPush)
                    return false;
                break;

            case "tag":
                if (!config.Triggers.OnTag)
                    return false;
                // Check tag patterns if specified
                if (config.Triggers.TagPatterns is { Length: > 0 })
                {
                    if (!config.Triggers.TagPatterns.Any(p => MatchesPattern(payload.Branch ?? "", p)))
                        return false;
                }
                break;

            case "pull_request":
                if (!config.Triggers.OnPullRequest)
                    return false;
                // Check PR action if specified
                if (config.Triggers.PrActions is { Length: > 0 })
                {
                    var actionStr = payload.Metadata.GetValueOrDefault("action", "");
                    var matchedAction = Enum.TryParse<PullRequestAction>(actionStr, ignoreCase: true, out var action)
                        && config.Triggers.PrActions.Contains(action);
                    if (!matchedAction)
                        return false;
                }
                break;

            default:
                return false;
        }

        // Check branch filters (only for push and PR, not tags)
        if (payload.EventType != "tag" && !string.IsNullOrEmpty(payload.Branch))
        {
            var included = config.Branches.Include.Any(p => MatchesPattern(payload.Branch, p));
            if (!included)
                return false;

            var excluded = config.Branches.Exclude?.Any(p => MatchesPattern(payload.Branch, p)) ?? false;
            if (excluded)
                return false;
        }

        return true;
    }

    private async Task<GitCredentials?> GetCredentialsAsync(
        string? authRef,
        GitAuthMethod authMethod,
        CancellationToken ct)
    {
        if (string.IsNullOrEmpty(authRef))
        {
            return null;
        }

        var resolved = await _credentialResolver.ResolveAsync(authRef, ct);
        if (resolved == null)
        {
            return null;
        }

        return authMethod switch
        {
            GitAuthMethod.Token => new GitCredentials
            {
                AuthType = GitAuthType.Token,
                Token = resolved.Token ?? resolved.Password
            },
            GitAuthMethod.Ssh => new GitCredentials
            {
                AuthType = GitAuthType.Ssh,
                SshPrivateKey = resolved.Properties?.GetValueOrDefault("privateKey"),
                SshPassphrase = resolved.Properties?.GetValueOrDefault("passphrase")
            },
            GitAuthMethod.OAuth => new GitCredentials
            {
                AuthType = GitAuthType.OAuth,
                Token = resolved.Token
            },
            GitAuthMethod.GitHubApp => new GitCredentials
            {
                AuthType = GitAuthType.GitHubApp,
                AppId = resolved.Properties?.GetValueOrDefault("appId"),
                PrivateKey = resolved.Properties?.GetValueOrDefault("privateKey"),
                InstallationId = resolved.Properties?.GetValueOrDefault("installationId")
            },
            _ => null
        };
    }

    private static bool MatchesPattern(string value, string pattern)
    {
        var regexPattern = "^" + Regex.Escape(pattern)
            .Replace("\\*\\*", ".*")
            .Replace("\\*", "[^/]*")
            .Replace("\\?", ".") + "$";

        return Regex.IsMatch(value, regexPattern, RegexOptions.IgnoreCase);
    }
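
    // Branch filter sketch (same glob rules as the CLI handler's helper):
    //   MatchesPattern("main", "main")              -> true
    //   MatchesPattern("release/1.2", "release/*")  -> true
    //   MatchesPattern("feature/a/b", "feature/*")  -> false ("*" stops at "/")
    //   MatchesPattern("feature/a/b", "feature/**") -> true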

    private static string BuildReference(string repositoryUrl, string branchOrRef)
    {
        return $"{repositoryUrl}@{branchOrRef}";
    }

    private static bool VerifyHmacSha256(byte[] payload, string expected, string secret)
    {
        using var hmac = new System.Security.Cryptography.HMACSHA256(
            System.Text.Encoding.UTF8.GetBytes(secret));
        var computed = Convert.ToHexString(hmac.ComputeHash(payload)).ToLowerInvariant();
        return System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(
            System.Text.Encoding.UTF8.GetBytes(computed),
            System.Text.Encoding.UTF8.GetBytes(expected.ToLowerInvariant()));
    }

    private static bool VerifyHmacSha1(byte[] payload, string expected, string secret)
    {
        using var hmac = new System.Security.Cryptography.HMACSHA1(
            System.Text.Encoding.UTF8.GetBytes(secret));
        var computed = Convert.ToHexString(hmac.ComputeHash(payload)).ToLowerInvariant();
        return System.Security.Cryptography.CryptographicOperations.FixedTimeEquals(
            System.Text.Encoding.UTF8.GetBytes(computed),
            System.Text.Encoding.UTF8.GetBytes(expected.ToLowerInvariant()));
    }
}
@@ -0,0 +1,172 @@
using StellaOps.Scanner.Sources.Configuration;

namespace StellaOps.Scanner.Sources.Handlers.Git;

/// <summary>
/// Interface for interacting with Git repositories via API.
/// </summary>
public interface IGitClient : IDisposable
{
    /// <summary>
    /// Get repository information.
    /// </summary>
    Task<RepositoryInfo?> GetRepositoryInfoAsync(CancellationToken ct = default);

    /// <summary>
    /// List branches in the repository.
    /// </summary>
    Task<IReadOnlyList<BranchInfo>> ListBranchesAsync(CancellationToken ct = default);

    /// <summary>
    /// List tags in the repository.
    /// </summary>
    Task<IReadOnlyList<TagInfo>> ListTagsAsync(CancellationToken ct = default);

    /// <summary>
    /// Get commit information.
    /// </summary>
    Task<CommitInfo?> GetCommitAsync(string sha, CancellationToken ct = default);
}

/// <summary>
/// Factory for creating Git clients.
/// </summary>
public interface IGitClientFactory
{
    /// <summary>
    /// Create a Git client for the specified provider.
    /// </summary>
    IGitClient Create(
        GitProvider provider,
        string repositoryUrl,
        GitCredentials? credentials = null);
}

/// <summary>
/// Credentials for Git repository authentication.
/// </summary>
public sealed record GitCredentials
{
    /// <summary>Type of authentication.</summary>
    public required GitAuthType AuthType { get; init; }

    /// <summary>Personal access token or OAuth token.</summary>
    public string? Token { get; init; }

    /// <summary>SSH private key content.</summary>
    public string? SshPrivateKey { get; init; }

    /// <summary>SSH key passphrase.</summary>
    public string? SshPassphrase { get; init; }

    /// <summary>GitHub App ID.</summary>
    public string? AppId { get; init; }

    /// <summary>GitHub App private key.</summary>
    public string? PrivateKey { get; init; }

    /// <summary>GitHub App installation ID.</summary>
    public string? InstallationId { get; init; }
}

/// <summary>
/// Git authentication types.
/// </summary>
public enum GitAuthType
{
    None,
    Token,
    Ssh,
    OAuth,
    GitHubApp
}

/// <summary>
/// Repository information.
/// </summary>
public sealed record RepositoryInfo
{
    /// <summary>Repository name.</summary>
    public required string Name { get; init; }

    /// <summary>Full path or full name.</summary>
    public required string FullName { get; init; }

    /// <summary>Default branch name.</summary>
    public string? DefaultBranch { get; init; }

    /// <summary>Repository size in KB.</summary>
    public long SizeKb { get; init; }

    /// <summary>Whether the repository is private.</summary>
    public bool IsPrivate { get; init; }

    /// <summary>Repository description.</summary>
    public string? Description { get; init; }

    /// <summary>Clone URL (HTTPS).</summary>
    public string? CloneUrl { get; init; }

    /// <summary>SSH clone URL.</summary>
    public string? SshUrl { get; init; }
}

/// <summary>
/// Branch information.
/// </summary>
public sealed record BranchInfo
{
    /// <summary>Branch name.</summary>
    public required string Name { get; init; }

    /// <summary>HEAD commit SHA.</summary>
    public string? HeadCommit { get; init; }

    /// <summary>Whether this is the default branch.</summary>
    public bool IsDefault { get; init; }

    /// <summary>Whether the branch is protected.</summary>
    public bool IsProtected { get; init; }
}

/// <summary>
/// Tag information.
/// </summary>
public sealed record TagInfo
{
    /// <summary>Tag name.</summary>
    public required string Name { get; init; }

    /// <summary>Commit SHA the tag points to.</summary>
    public string? CommitSha { get; init; }

    /// <summary>Tag message (for annotated tags).</summary>
    public string? Message { get; init; }

    /// <summary>When the tag was created.</summary>
    public DateTimeOffset? CreatedAt { get; init; }
}

/// <summary>
/// Commit information.
/// </summary>
public sealed record CommitInfo
{
    /// <summary>Commit SHA.</summary>
    public required string Sha { get; init; }

    /// <summary>Commit message.</summary>
    public string? Message { get; init; }

    /// <summary>Author name.</summary>
    public string? AuthorName { get; init; }

    /// <summary>Author email.</summary>
    public string? AuthorEmail { get; init; }

    /// <summary>When the commit was authored.</summary>
    public DateTimeOffset? AuthoredAt { get; init; }

    /// <summary>Parent commit SHAs.</summary>
    public IReadOnlyList<string> Parents { get; init; } = [];
}
@@ -0,0 +1,113 @@
using System.Text.Json;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.Handlers;

/// <summary>
/// Interface for source type-specific handlers.
/// Each source type (Zastava, Docker, CLI, Git) has its own handler.
/// </summary>
public interface ISourceTypeHandler
{
    /// <summary>The source type this handler manages.</summary>
    SbomSourceType SourceType { get; }

    /// <summary>
    /// Discover targets to scan based on source configuration and trigger context.
    /// </summary>
    /// <param name="source">The source configuration.</param>
    /// <param name="context">The trigger context.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>List of targets to scan.</returns>
    Task<IReadOnlyList<ScanTarget>> DiscoverTargetsAsync(
        SbomSource source,
        TriggerContext context,
        CancellationToken ct = default);

    /// <summary>
    /// Validate source configuration.
    /// </summary>
    /// <param name="configuration">The configuration to validate.</param>
    /// <returns>Validation result.</returns>
    ConfigValidationResult ValidateConfiguration(JsonDocument configuration);

    /// <summary>
    /// Test connection to the source.
    /// </summary>
    /// <param name="source">The source to test.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Connection test result.</returns>
    Task<ConnectionTestResult> TestConnectionAsync(
        SbomSource source,
        CancellationToken ct = default);

    /// <summary>
    /// Gets the maximum number of concurrent targets this handler supports.
    /// </summary>
    int MaxConcurrentTargets => 10;

    /// <summary>
    /// Whether this handler supports webhook triggers.
    /// </summary>
    bool SupportsWebhooks => false;

    /// <summary>
    /// Whether this handler supports scheduled triggers.
    /// </summary>
    bool SupportsScheduling => true;
}
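
// Note: the three members above are C# default interface implementations;
// concrete handlers override them (e.g. DockerSourceHandler raises
// MaxConcurrentTargets to 50, ZastavaSourceHandler enables SupportsWebhooks).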

/// <summary>
/// Extended interface for handlers that can process webhooks.
/// </summary>
public interface IWebhookCapableHandler : ISourceTypeHandler
{
    /// <summary>
    /// Verify webhook signature.
    /// </summary>
    bool VerifyWebhookSignature(
        byte[] payload,
        string signature,
        string secret);

    /// <summary>
    /// Parse webhook payload to extract trigger information.
    /// </summary>
    WebhookPayloadInfo ParseWebhookPayload(JsonDocument payload);
}

/// <summary>
/// Parsed webhook payload information.
/// </summary>
public sealed record WebhookPayloadInfo
{
    /// <summary>Type of event (push, tag, delete, etc.).</summary>
    public required string EventType { get; init; }

    /// <summary>Repository or image reference.</summary>
    public required string Reference { get; init; }

    /// <summary>Tag if applicable.</summary>
    public string? Tag { get; init; }

    /// <summary>Digest if applicable.</summary>
    public string? Digest { get; init; }

    /// <summary>Branch if applicable (git webhooks).</summary>
    public string? Branch { get; init; }

    /// <summary>Commit SHA if applicable (git webhooks).</summary>
    public string? CommitSha { get; init; }

    /// <summary>User who triggered the event.</summary>
    public string? Actor { get; init; }

    /// <summary>Timestamp of the event.</summary>
    public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>Additional metadata from the payload.</summary>
    public Dictionary<string, string> Metadata { get; init; } = [];
}
@@ -0,0 +1,128 @@
namespace StellaOps.Scanner.Sources.Handlers.Zastava;

/// <summary>
/// Interface for interacting with container registries.
/// </summary>
public interface IRegistryClient : IDisposable
{
    /// <summary>
    /// Test connectivity to the registry.
    /// </summary>
    Task<bool> PingAsync(CancellationToken ct = default);

    /// <summary>
    /// List repositories matching a pattern.
    /// </summary>
    /// <param name="pattern">Glob pattern (e.g., "library/*").</param>
    /// <param name="limit">Maximum number of repositories to return.</param>
    /// <param name="ct">Cancellation token.</param>
    Task<IReadOnlyList<string>> ListRepositoriesAsync(
        string? pattern = null,
        int limit = 100,
        CancellationToken ct = default);

    /// <summary>
    /// List tags for a repository.
    /// </summary>
    /// <param name="repository">Repository name.</param>
    /// <param name="patterns">Tag patterns to match (null = all).</param>
    /// <param name="limit">Maximum number of tags to return.</param>
    /// <param name="ct">Cancellation token.</param>
    Task<IReadOnlyList<RegistryTag>> ListTagsAsync(
        string repository,
        IReadOnlyList<string>? patterns = null,
        int limit = 100,
        CancellationToken ct = default);

    /// <summary>
    /// Get manifest digest for an image reference.
    /// </summary>
    Task<string?> GetDigestAsync(
        string repository,
        string tag,
        CancellationToken ct = default);
}

/// <summary>
/// Represents a tag in a container registry.
/// </summary>
public sealed record RegistryTag
{
    /// <summary>The tag name.</summary>
    public required string Name { get; init; }

    /// <summary>The manifest digest.</summary>
    public string? Digest { get; init; }

    /// <summary>When the tag was last updated.</summary>
    public DateTimeOffset? LastUpdated { get; init; }

    /// <summary>Size of the image in bytes.</summary>
    public long? SizeBytes { get; init; }
}

/// <summary>
/// Factory for creating registry clients.
/// </summary>
public interface IRegistryClientFactory
{
    /// <summary>
    /// Create a registry client for the specified registry.
    /// </summary>
    IRegistryClient Create(
        Configuration.RegistryType registryType,
        string registryUrl,
        RegistryCredentials? credentials = null);
}

/// <summary>
/// Credentials for registry authentication.
/// </summary>
public sealed record RegistryCredentials
{
    /// <summary>Type of authentication.</summary>
    public required RegistryAuthType AuthType { get; init; }

    /// <summary>Username for basic auth.</summary>
    public string? Username { get; init; }

    /// <summary>Password or token for basic auth.</summary>
    public string? Password { get; init; }

    /// <summary>Bearer token for token auth.</summary>
    public string? Token { get; init; }

    /// <summary>AWS access key for ECR.</summary>
    public string? AwsAccessKey { get; init; }

    /// <summary>AWS secret key for ECR.</summary>
    public string? AwsSecretKey { get; init; }

    /// <summary>AWS region for ECR.</summary>
    public string? AwsRegion { get; init; }

    /// <summary>GCP service account JSON for GCR.</summary>
    public string? GcpServiceAccountJson { get; init; }

    /// <summary>Azure client ID for ACR.</summary>
    public string? AzureClientId { get; init; }

    /// <summary>Azure client secret for ACR.</summary>
    public string? AzureClientSecret { get; init; }

    /// <summary>Azure tenant ID for ACR.</summary>
    public string? AzureTenantId { get; init; }
}

/// <summary>
/// Registry authentication types.
/// </summary>
public enum RegistryAuthType
{
    None,
    Basic,
    Token,
    AwsEcr,
    GcpGcr,
    AzureAcr
}
@@ -0,0 +1,456 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Contracts;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Services;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.Handlers.Zastava;

/// <summary>
/// Handler for Zastava (container registry webhook) sources.
/// </summary>
public sealed class ZastavaSourceHandler : ISourceTypeHandler, IWebhookCapableHandler
{
    private readonly IRegistryClientFactory _clientFactory;
    private readonly ICredentialResolver _credentialResolver;
    private readonly ISourceConfigValidator _configValidator;
    private readonly ILogger<ZastavaSourceHandler> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public SbomSourceType SourceType => SbomSourceType.Zastava;
    public bool SupportsWebhooks => true;
    public bool SupportsScheduling => true;
    public int MaxConcurrentTargets => 20;

    public ZastavaSourceHandler(
        IRegistryClientFactory clientFactory,
        ICredentialResolver credentialResolver,
        ISourceConfigValidator configValidator,
        ILogger<ZastavaSourceHandler> logger)
    {
        _clientFactory = clientFactory;
        _credentialResolver = credentialResolver;
        _configValidator = configValidator;
        _logger = logger;
    }

    public async Task<IReadOnlyList<ScanTarget>> DiscoverTargetsAsync(
        SbomSource source,
        TriggerContext context,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<ZastavaSourceConfig>(JsonOptions);
        if (config == null)
        {
            _logger.LogWarning("Invalid configuration for source {SourceId}", source.SourceId);
            return [];
        }

        // For webhook triggers, extract target from payload
        if (context.Trigger == SbomSourceRunTrigger.Webhook)
        {
            if (context.WebhookPayload != null)
            {
                var payloadInfo = ParseWebhookPayload(context.WebhookPayload);

                // Check if it matches filters
                if (!MatchesFilters(payloadInfo, config.Filters))
                {
                    _logger.LogInformation(
                        "Webhook payload does not match filters for source {SourceId}",
                        source.SourceId);
                    return [];
                }

                var reference = BuildReference(config.RegistryUrl, payloadInfo.Reference, payloadInfo.Tag);
                return
                [
                    new ScanTarget
                    {
                        Reference = reference,
                        Digest = payloadInfo.Digest,
                        Metadata = new Dictionary<string, string>
                        {
                            ["repository"] = payloadInfo.Reference,
                            ["tag"] = payloadInfo.Tag ?? "latest",
                            ["pushedBy"] = payloadInfo.Actor ?? "unknown",
                            ["eventType"] = payloadInfo.EventType
                        }
                    }
                ];
            }
        }

        // For scheduled/manual triggers, discover from registry
        return await DiscoverFromRegistryAsync(source, config, ct);
    }

    private async Task<IReadOnlyList<ScanTarget>> DiscoverFromRegistryAsync(
        SbomSource source,
        ZastavaSourceConfig config,
        CancellationToken ct)
    {
        var credentials = await GetCredentialsAsync(source.AuthRef, ct);
        using var client = _clientFactory.Create(config.RegistryType, config.RegistryUrl, credentials);

        var targets = new List<ScanTarget>();
        var repoPatterns = config.Filters?.RepositoryPatterns ?? ["*"];

        foreach (var pattern in repoPatterns)
        {
            var repos = await client.ListRepositoriesAsync(pattern, 100, ct);

            foreach (var repo in repos)
            {
                // Check exclusions
                if (config.Filters?.ExcludePatterns?.Any(ex => MatchesPattern(repo, ex)) == true)
                {
                    continue;
                }

                var tagPatterns = config.Filters?.TagPatterns ?? ["*"];
                var tags = await client.ListTagsAsync(repo, tagPatterns, 50, ct);

                foreach (var tag in tags)
                {
                    // Check tag exclusions
                    if (config.Filters?.ExcludePatterns?.Any(ex => MatchesPattern(tag.Name, ex)) == true)
                    {
                        continue;
                    }

                    var reference = BuildReference(config.RegistryUrl, repo, tag.Name);
                    targets.Add(new ScanTarget
                    {
                        Reference = reference,
                        Digest = tag.Digest,
                        Metadata = new Dictionary<string, string>
                        {
                            ["repository"] = repo,
                            ["tag"] = tag.Name
                        }
                    });
                }
            }
        }

        _logger.LogInformation(
            "Discovered {Count} targets from registry for source {SourceId}",
            targets.Count, source.SourceId);

        return targets;
    }

    public ConfigValidationResult ValidateConfiguration(JsonDocument configuration)
    {
        return _configValidator.Validate(SbomSourceType.Zastava, configuration);
    }

    public async Task<ConnectionTestResult> TestConnectionAsync(
        SbomSource source,
        CancellationToken ct = default)
    {
        var config = source.Configuration.Deserialize<ZastavaSourceConfig>(JsonOptions);
        if (config == null)
        {
            return new ConnectionTestResult
            {
                Success = false,
                Message = "Invalid configuration",
                TestedAt = DateTimeOffset.UtcNow
            };
        }

        try
        {
            var credentials = await GetCredentialsAsync(source.AuthRef, ct);
            using var client = _clientFactory.Create(config.RegistryType, config.RegistryUrl, credentials);

            var pingSuccess = await client.PingAsync(ct);
            if (!pingSuccess)
            {
                return new ConnectionTestResult
                {
                    Success = false,
                    Message = "Registry ping failed",
                    TestedAt = DateTimeOffset.UtcNow,
                    Details = new Dictionary<string, object>
                    {
                        ["registryUrl"] = config.RegistryUrl,
                        ["registryType"] = config.RegistryType.ToString()
                    }
                };
            }

            // Try to list repositories to verify access
            var repos = await client.ListRepositoriesAsync(limit: 1, ct: ct);

            return new ConnectionTestResult
            {
                Success = true,
                Message = "Successfully connected to registry",
                TestedAt = DateTimeOffset.UtcNow,
                Details = new Dictionary<string, object>
                {
                    ["registryUrl"] = config.RegistryUrl,
                    ["registryType"] = config.RegistryType.ToString(),
                    ["repositoriesAccessible"] = repos.Count > 0
                }
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Connection test failed for source {SourceId}", source.SourceId);
            return new ConnectionTestResult
            {
                Success = false,
                Message = $"Connection failed: {ex.Message}",
                TestedAt = DateTimeOffset.UtcNow
            };
        }
    }

    public bool VerifyWebhookSignature(byte[] payload, string signature, string secret)
    {
        // Support multiple signature formats
        // Docker Hub: X-Hub-Signature (SHA1)
        // Harbor: Authorization header with shared secret
        // Generic: HMAC-SHA256

        if (string.IsNullOrEmpty(signature) || string.IsNullOrEmpty(secret))
        {
            return false;
        }

        // Try HMAC-SHA256 first (most common)
        var secretBytes = Encoding.UTF8.GetBytes(secret);
        using var hmac256 = new HMACSHA256(secretBytes);
        var computed256 = Convert.ToHexString(hmac256.ComputeHash(payload)).ToLowerInvariant();

        if (signature.StartsWith("sha256=", StringComparison.OrdinalIgnoreCase))
        {
            var expected = signature[7..].ToLowerInvariant();
            return CryptographicOperations.FixedTimeEquals(
                Encoding.UTF8.GetBytes(computed256),
                Encoding.UTF8.GetBytes(expected));
        }

        // Try SHA1 (Docker Hub legacy)
        using var hmac1 = new HMACSHA1(secretBytes);
        var computed1 = Convert.ToHexString(hmac1.ComputeHash(payload)).ToLowerInvariant();

        if (signature.StartsWith("sha1=", StringComparison.OrdinalIgnoreCase))
        {
            var expected = signature[5..].ToLowerInvariant();
            return CryptographicOperations.FixedTimeEquals(
                Encoding.UTF8.GetBytes(computed1),
                Encoding.UTF8.GetBytes(expected));
        }

        // Plain comparison (Harbor style)
        return CryptographicOperations.FixedTimeEquals(
            Encoding.UTF8.GetBytes(signature),
            Encoding.UTF8.GetBytes(secret));
    }

    public WebhookPayloadInfo ParseWebhookPayload(JsonDocument payload)
    {
        var root = payload.RootElement;

        // Try different webhook formats

        // Docker Hub format
        if (root.TryGetProperty("push_data", out var pushData) &&
            root.TryGetProperty("repository", out var repository))
        {
            return new WebhookPayloadInfo
            {
                EventType = "push",
                Reference = repository.TryGetProperty("repo_name", out var repoName)
                    ? repoName.GetString()!
                    : repository.GetProperty("name").GetString()!,
                Tag = pushData.TryGetProperty("tag", out var tag) ? tag.GetString() : "latest",
                Actor = pushData.TryGetProperty("pusher", out var pusher) ? pusher.GetString() : null,
                Timestamp = DateTimeOffset.UtcNow
            };
        }

        // Harbor format
        if (root.TryGetProperty("type", out var eventType) &&
            root.TryGetProperty("event_data", out var eventData))
        {
            var resources = eventData.TryGetProperty("resources", out var res) ? res : default;
            var firstResource = resources.ValueKind == JsonValueKind.Array && resources.GetArrayLength() > 0
                ? resources[0]
                : default;

            return new WebhookPayloadInfo
            {
                EventType = eventType.GetString() ?? "push",
                Reference = eventData.TryGetProperty("repository", out var repo)
                    ? (repo.TryGetProperty("repo_full_name", out var fullName)
                        ? fullName.GetString()!
                        : repo.GetProperty("name").GetString()!)
                    : "",
                // Guard: firstResource is an undefined element when no resources were
                // present, and TryGetProperty throws on an undefined element.
                Tag = firstResource.ValueKind == JsonValueKind.Object &&
                      firstResource.TryGetProperty("tag", out var harborTag)
                    ? harborTag.GetString()
                    : null,
                Digest = firstResource.ValueKind == JsonValueKind.Object &&
                         firstResource.TryGetProperty("digest", out var digest)
                    ? digest.GetString()
                    : null,
                Actor = eventData.TryGetProperty("operator", out var op) ? op.GetString() : null,
                Timestamp = DateTimeOffset.UtcNow
            };
        }

        // Generic OCI distribution format
        if (root.TryGetProperty("events", out var events) &&
            events.ValueKind == JsonValueKind.Array &&
            events.GetArrayLength() > 0)
        {
            var firstEvent = events[0];
            // Guard: resolve the target element once so the lookups below never call
            // TryGetProperty on an undefined element when "target" is missing.
            var target = firstEvent.TryGetProperty("target", out var t) && t.ValueKind == JsonValueKind.Object
                ? t
                : default;
            return new WebhookPayloadInfo
            {
                EventType = firstEvent.TryGetProperty("action", out var action)
                    ? action.GetString() ?? "push"
                    : "push",
                Reference = target.ValueKind == JsonValueKind.Object &&
                            target.TryGetProperty("repository", out var targetRepo)
                    ? targetRepo.GetString()!
                    : "",
                Tag = target.ValueKind == JsonValueKind.Object &&
                      target.TryGetProperty("tag", out var ociTag)
                    ? ociTag.GetString()
                    : null,
                Digest = target.ValueKind == JsonValueKind.Object &&
                         target.TryGetProperty("digest", out var ociDigest)
                    ? ociDigest.GetString()
                    : null,
                Actor = firstEvent.TryGetProperty("actor", out var actor) &&
                        actor.TryGetProperty("name", out var actorName)
                    ? actorName.GetString()
                    : null,
                Timestamp = DateTimeOffset.UtcNow
            };
        }

        _logger.LogWarning("Unable to parse webhook payload format");
        return new WebhookPayloadInfo
        {
            EventType = "unknown",
            Reference = "",
            Timestamp = DateTimeOffset.UtcNow
        };
    }

    private async Task<RegistryCredentials?> GetCredentialsAsync(string? authRef, CancellationToken ct)
    {
        if (string.IsNullOrEmpty(authRef))
        {
            return null;
        }

        var resolved = await _credentialResolver.ResolveAsync(authRef, ct);
        if (resolved == null)
        {
            return null;
        }

        return resolved.Type switch
        {
            CredentialType.BasicAuth => new RegistryCredentials
            {
                AuthType = RegistryAuthType.Basic,
                Username = resolved.Username,
                Password = resolved.Password
            },
            CredentialType.BearerToken => new RegistryCredentials
            {
                AuthType = RegistryAuthType.Token,
                Token = resolved.Token
            },
            CredentialType.AwsCredentials => new RegistryCredentials
            {
                AuthType = RegistryAuthType.AwsEcr,
                AwsAccessKey = resolved.Properties?.GetValueOrDefault("accessKey"),
                AwsSecretKey = resolved.Properties?.GetValueOrDefault("secretKey"),
                AwsRegion = resolved.Properties?.GetValueOrDefault("region")
            },
            _ => null
        };
    }

    private static bool MatchesFilters(WebhookPayloadInfo payload, ZastavaFilters? filters)
    {
        if (filters == null)
        {
            return true;
        }

        // Check repository patterns
        if (filters.RepositoryPatterns?.Count > 0)
        {
            if (!filters.RepositoryPatterns.Any(p => MatchesPattern(payload.Reference, p)))
            {
                return false;
            }
        }

        // Check tag patterns
        if (filters.TagPatterns?.Count > 0 && payload.Tag != null)
        {
            if (!filters.TagPatterns.Any(p => MatchesPattern(payload.Tag, p)))
            {
                return false;
            }
        }

        // Check exclusions
        if (filters.ExcludePatterns?.Count > 0)
        {
            if (filters.ExcludePatterns.Any(p =>
                MatchesPattern(payload.Reference, p) ||
                (payload.Tag != null && MatchesPattern(payload.Tag, p))))
            {
                return false;
            }
        }

        return true;
    }

    private static bool MatchesPattern(string value, string pattern)
    {
        // Convert glob pattern to regex
        var regexPattern = "^" + Regex.Escape(pattern)
            .Replace("\\*", ".*")
            .Replace("\\?", ".") + "$";

        return Regex.IsMatch(value, regexPattern, RegexOptions.IgnoreCase);
    }

    private static string BuildReference(string registryUrl, string repository, string? tag)
    {
        var host = new Uri(registryUrl).Host;

        // Docker Hub special case
        if (host.Contains("docker.io") || host.Contains("docker.com"))
        {
            if (!repository.Contains('/'))
            {
                repository = $"library/{repository}";
            }
            return $"{repository}:{tag ?? "latest"}";
        }

        return $"{host}/{repository}:{tag ?? "latest"}";
    }
}
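A minimal sketch of exercising `VerifyWebhookSignature` from a test. The constructor dependencies (`clientFactory`, `credentialResolver`, `configValidator`) are assumed test doubles and are not shown; `NullLogger` comes from Microsoft.Extensions.Logging.Abstractions:

```csharp
// Sender side: compute the header exactly the way the handler expects it.
var payload = Encoding.UTF8.GetBytes("""{"type":"PUSH_ARTIFACT"}""");
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes("shared-secret"));
var header = "sha256=" + Convert.ToHexString(hmac.ComputeHash(payload)).ToLowerInvariant();

// Receiver side: the handler recomputes the HMAC and compares in constant time.
var handler = new ZastavaSourceHandler(clientFactory, credentialResolver, configValidator,
    NullLogger<ZastavaSourceHandler>.Instance);
var ok = handler.VerifyWebhookSignature(payload, header, "shared-secret"); // true
```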
@@ -53,6 +53,18 @@ public interface ISbomSourceRepository
    /// Check if a source name exists in the tenant.
    /// </summary>
    Task<bool> NameExistsAsync(string tenantId, string name, Guid? excludeSourceId = null, CancellationToken ct = default);

    /// <summary>
    /// Search for sources by name across all tenants.
    /// Used for webhook routing where tenant is not known upfront.
    /// </summary>
    Task<IReadOnlyList<SbomSource>> SearchByNameAsync(string name, CancellationToken ct = default);

    /// <summary>
    /// Get sources that are due for scheduled execution.
    /// Alias for GetDueScheduledSourcesAsync for dispatcher compatibility.
    /// </summary>
    Task<IReadOnlyList<SbomSource>> GetDueForScheduledRunAsync(CancellationToken ct = default);
}

/// <summary>
@@ -122,11 +122,11 @@ public sealed class SbomSourceRepository : RepositoryBase<ScannerSourcesDataSour
        MapSource,
        ct);

-   var totalCount = await ExecuteScalarAsync<long>(
+   var totalCount = (await ExecuteScalarAsync<long>(
        tenantId,
        countSb.ToString(),
        AddFilters,
-       ct) ?? 0;
+       ct)).Value;

    string? nextCursor = null;
    if (items.Count > request.Limit)

@@ -296,6 +296,30 @@ public sealed class SbomSourceRepository : RepositoryBase<ScannerSourcesDataSour
            ct);
    }

    public async Task<IReadOnlyList<SbomSource>> SearchByNameAsync(
        string name,
        CancellationToken ct = default)
    {
        const string sql = $"""
            SELECT * FROM {FullTable}
            WHERE name = @name
            LIMIT 10
            """;

        // Cross-tenant search, use system context
        return await QueryAsync(
            "__system__",
            sql,
            cmd => AddParameter(cmd, "name", name),
            MapSource,
            ct);
    }

    public Task<IReadOnlyList<SbomSource>> GetDueForScheduledRunAsync(CancellationToken ct = default)
    {
        return GetDueScheduledSourcesAsync(DateTimeOffset.UtcNow, 100, ct);
    }

    private void ConfigureSourceParams(NpgsqlCommand cmd, SbomSource source)
    {
        AddParameter(cmd, "sourceId", source.SourceId);
@@ -98,11 +98,12 @@ public sealed class SbomSourceRunRepository : RepositoryBase<ScannerSourcesDataS
        MapRun,
        ct);

-   var totalCount = await ExecuteScalarAsync<long>(
+   var totalCountResult = await ExecuteScalarAsync<long>(
        "__system__",
        countSb.ToString(),
        AddFilters,
-       ct) ?? 0;
+       ct);
+   var totalCount = totalCountResult.GetValueOrDefault();

    string? nextCursor = null;
    if (items.Count > request.Limit)
@@ -10,13 +10,21 @@ namespace StellaOps.Scanner.Sources.Persistence;
/// </summary>
public sealed class ScannerSourcesDataSource : DataSourceBase
{
    /// <summary>
    /// Default schema name for Scanner Sources tables.
    /// </summary>
    public const string DefaultSchemaName = "sources";

    /// <summary>
    /// Creates a new Scanner Sources data source.
    /// </summary>
    public ScannerSourcesDataSource(
        IOptions<PostgresOptions> options,
        ILogger<ScannerSourcesDataSource> logger)
-       : base(options, logger)
+       : base(options.Value, logger)
    {
    }

    /// <inheritdoc />
    protected override string ModuleName => "ScannerSources";
}
@@ -0,0 +1,115 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Sources.Triggers;

namespace StellaOps.Scanner.Sources.Scheduling;

/// <summary>
/// Background service that processes scheduled SBOM sources.
/// </summary>
public sealed partial class SourceSchedulerHostedService : BackgroundService
{
    private readonly ISourceTriggerDispatcher _dispatcher;
    private readonly IOptionsMonitor<SourceSchedulerOptions> _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<SourceSchedulerHostedService> _logger;

    public SourceSchedulerHostedService(
        ISourceTriggerDispatcher dispatcher,
        IOptionsMonitor<SourceSchedulerOptions> options,
        TimeProvider timeProvider,
        ILogger<SourceSchedulerHostedService> logger)
    {
        _dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Source scheduler started");

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await ProcessScheduledSourcesAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Source scheduler encountered an error");
            }

            var options = _options.CurrentValue;
            await Task.Delay(options.CheckInterval, _timeProvider, stoppingToken);
        }

        _logger.LogInformation("Source scheduler stopping");
    }

    private async Task ProcessScheduledSourcesAsync(CancellationToken ct)
    {
        var options = _options.CurrentValue;

        if (!options.Enabled)
        {
            _logger.LogDebug("Source scheduler is disabled");
            return;
        }

        try
        {
            var processed = await _dispatcher.ProcessScheduledSourcesAsync(ct);

            if (processed > 0)
            {
                _logger.LogInformation("Processed {Count} scheduled sources", processed);
            }
            else
            {
                _logger.LogDebug("No scheduled sources due for processing");
            }
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to process scheduled sources");
        }
    }
}

/// <summary>
/// Configuration options for the source scheduler.
/// </summary>
public sealed class SourceSchedulerOptions
{
    /// <summary>
    /// Whether the scheduler is enabled.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// How often to check for due scheduled sources.
    /// </summary>
    public TimeSpan CheckInterval { get; set; } = TimeSpan.FromMinutes(1);

    /// <summary>
    /// Maximum number of sources to process in a single batch.
    /// </summary>
    public int MaxBatchSize { get; set; } = 50;

    /// <summary>
    /// Whether to allow scheduling sources that have never run.
    /// </summary>
    public bool AllowFirstRun { get; set; } = true;

    /// <summary>
    /// Minimum interval between runs for the same source (to prevent rapid re-triggering).
    /// </summary>
    public TimeSpan MinRunInterval { get; set; } = TimeSpan.FromMinutes(5);
}
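A minimal wiring sketch, assuming a standard generic-host setup; this diff defines no `AddScannerSources`-style extension, so the registrations are spelled out directly:

```csharp
services.Configure<SourceSchedulerOptions>(opts =>
{
    opts.CheckInterval = TimeSpan.FromSeconds(30); // poll faster than the 1-minute default
    opts.MaxBatchSize = 25;
});
services.AddSingleton(TimeProvider.System);            // injected so tests can fake time
services.AddHostedService<SourceSchedulerHostedService>();
```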
@@ -0,0 +1,51 @@
namespace StellaOps.Scanner.Sources.Services;

/// <summary>
/// Credential types supported by the resolver.
/// </summary>
public enum CredentialType
{
    None,
    BearerToken,
    BasicAuth,
    SshKey,
    AwsCredentials,
    GcpServiceAccount,
    AzureServicePrincipal,
    GitHubApp
}

/// <summary>
/// Resolved credential from the credential store.
/// </summary>
public sealed record ResolvedCredential
{
    public required CredentialType Type { get; init; }
    public string? Token { get; init; }
    public string? Username { get; init; }
    public string? Password { get; init; }
    public string? PrivateKey { get; init; }
    public string? Passphrase { get; init; }
    public IReadOnlyDictionary<string, string>? Properties { get; init; }
    public DateTimeOffset? ExpiresAt { get; init; }
}

/// <summary>
/// Interface for resolving credentials from the credential store.
/// Credentials are stored externally and referenced by AuthRef.
/// </summary>
public interface ICredentialResolver
{
    /// <summary>
    /// Resolves credentials by AuthRef.
    /// </summary>
    /// <param name="authRef">Reference to the credential in the store (e.g., "vault://secrets/registry-auth")</param>
    /// <param name="ct">Cancellation token</param>
    /// <returns>Resolved credential or null if not found</returns>
    Task<ResolvedCredential?> ResolveAsync(string authRef, CancellationToken ct = default);

    /// <summary>
    /// Checks if a credential reference is valid (exists and is accessible).
    /// </summary>
    Task<bool> ValidateRefAsync(string authRef, CancellationToken ct = default);
}
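A minimal in-memory `ICredentialResolver` sketch for tests; the dictionary-backed store is an assumption for illustration, not part of this diff:

```csharp
public sealed class InMemoryCredentialResolver : ICredentialResolver
{
    private readonly Dictionary<string, ResolvedCredential> _store = new();

    public void Add(string authRef, ResolvedCredential credential) => _store[authRef] = credential;

    public Task<ResolvedCredential?> ResolveAsync(string authRef, CancellationToken ct = default)
        => Task.FromResult(_store.TryGetValue(authRef, out var credential) ? credential : null);

    public Task<bool> ValidateRefAsync(string authRef, CancellationToken ct = default)
        => Task.FromResult(_store.ContainsKey(authRef));
}
```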
@@ -12,12 +12,15 @@
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
    <PackageReference Include="Microsoft.Extensions.Http" />
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" />
    <PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" />
    <PackageReference Include="Cronos" />
  </ItemGroup>

  <ItemGroup>
-   <ProjectReference Include="../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
+   <ProjectReference Include="../../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />
+   <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
  </ItemGroup>
</Project>
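The `Cronos` package referenced above is presumably what computes `next_scheduled_run` in application code (the SQL migration below notes that cron parsing is not done in the database). A minimal sketch using Cronos directly:

```csharp
using Cronos;

var expression = CronExpression.Parse("0 */6 * * *"); // every six hours
DateTimeOffset? next = expression.GetNextOccurrence(DateTimeOffset.UtcNow, TimeZoneInfo.Utc);
```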
@@ -0,0 +1,50 @@
using StellaOps.Scanner.Sources.Domain;

namespace StellaOps.Scanner.Sources.Triggers;

/// <summary>
/// Interface for dispatching source triggers and creating scan jobs.
/// </summary>
public interface ISourceTriggerDispatcher
{
    /// <summary>
    /// Dispatch a trigger for a source, discovering targets and creating scan jobs.
    /// </summary>
    /// <param name="sourceId">The source ID to trigger.</param>
    /// <param name="context">Trigger context with details.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Result containing the run and queued jobs.</returns>
    Task<TriggerDispatchResult> DispatchAsync(
        Guid sourceId,
        TriggerContext context,
        CancellationToken ct = default);

    /// <summary>
    /// Dispatch a trigger by source ID with simple trigger type.
    /// </summary>
    Task<TriggerDispatchResult> DispatchAsync(
        Guid sourceId,
        SbomSourceRunTrigger trigger,
        string? triggerDetails = null,
        CancellationToken ct = default);

    /// <summary>
    /// Process all scheduled sources that are due for execution.
    /// Called by the scheduler worker.
    /// </summary>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Number of sources processed.</returns>
    Task<int> ProcessScheduledSourcesAsync(CancellationToken ct = default);

    /// <summary>
    /// Retry a failed run for a source.
    /// </summary>
    /// <param name="sourceId">The source ID.</param>
    /// <param name="originalRunId">The original run that failed.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The new retry run.</returns>
    Task<TriggerDispatchResult> RetryAsync(
        Guid sourceId,
        Guid originalRunId,
        CancellationToken ct = default);
}
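A minimal sketch of the simple overload; `dispatcher` is resolved from DI and `sourceId` is a placeholder for a real source id:

```csharp
var result = await dispatcher.DispatchAsync(
    sourceId,                                    // a real source id in practice
    SbomSourceRunTrigger.Manual,
    triggerDetails: "Ad-hoc rescan from ops console");

Console.WriteLine(result.Success
    ? $"Queued {result.JobsQueued} scan jobs for run {result.Run.RunId}"
    : $"Dispatch failed: {result.Error}");
```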
@@ -0,0 +1,320 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Sources.Domain;
using StellaOps.Scanner.Sources.Handlers;
using StellaOps.Scanner.Sources.Persistence;

namespace StellaOps.Scanner.Sources.Triggers;

/// <summary>
/// Dispatches source triggers, discovering targets and creating scan jobs.
/// </summary>
public sealed class SourceTriggerDispatcher : ISourceTriggerDispatcher
{
    private readonly ISbomSourceRepository _sourceRepository;
    private readonly ISbomSourceRunRepository _runRepository;
    private readonly IEnumerable<ISourceTypeHandler> _handlers;
    private readonly IScanJobQueue _scanJobQueue;
    private readonly ILogger<SourceTriggerDispatcher> _logger;

    public SourceTriggerDispatcher(
        ISbomSourceRepository sourceRepository,
        ISbomSourceRunRepository runRepository,
        IEnumerable<ISourceTypeHandler> handlers,
        IScanJobQueue scanJobQueue,
        ILogger<SourceTriggerDispatcher> logger)
    {
        _sourceRepository = sourceRepository;
        _runRepository = runRepository;
        _handlers = handlers;
        _scanJobQueue = scanJobQueue;
        _logger = logger;
    }

    public Task<TriggerDispatchResult> DispatchAsync(
        Guid sourceId,
        SbomSourceRunTrigger trigger,
        string? triggerDetails = null,
        CancellationToken ct = default)
    {
        var context = new TriggerContext
        {
            Trigger = trigger,
            TriggerDetails = triggerDetails,
            CorrelationId = Guid.NewGuid().ToString("N")
        };

        return DispatchAsync(sourceId, context, ct);
    }

    public async Task<TriggerDispatchResult> DispatchAsync(
        Guid sourceId,
        TriggerContext context,
        CancellationToken ct = default)
    {
        _logger.LogInformation(
            "Dispatching {Trigger} for source {SourceId}, correlationId={CorrelationId}",
            context.Trigger, sourceId, context.CorrelationId);

        // 1. Get the source
        var source = await _sourceRepository.GetByIdAsync(null!, sourceId, ct);
        if (source == null)
        {
            _logger.LogWarning("Source {SourceId} not found", sourceId);
            throw new KeyNotFoundException($"Source {sourceId} not found");
        }

        // 2. Check if source can be triggered
        var canTrigger = CanTrigger(source, context);
        if (!canTrigger.Success)
        {
            _logger.LogWarning(
                "Source {SourceId} cannot be triggered: {Reason}",
                sourceId, canTrigger.Error);

            // Create a failed run for tracking
            var failedRun = SbomSourceRun.Create(
                sourceId,
                source.TenantId,
                context.Trigger,
                context.CorrelationId,
                context.TriggerDetails);
            failedRun.Fail(canTrigger.Error!);
            await _runRepository.CreateAsync(failedRun, ct);

            return new TriggerDispatchResult
            {
                Run = failedRun,
                Success = false,
                Error = canTrigger.Error
            };
        }

        // 3. Create the run record
        var run = SbomSourceRun.Create(
            sourceId,
            source.TenantId,
            context.Trigger,
            context.CorrelationId,
            context.TriggerDetails);

        await _runRepository.CreateAsync(run, ct);

        try
        {
            // 4. Get the appropriate handler
            var handler = GetHandler(source.SourceType);
            if (handler == null)
            {
                run.Fail($"No handler registered for source type {source.SourceType}");
                await _runRepository.UpdateAsync(run, ct);
                return new TriggerDispatchResult
                {
                    Run = run,
                    Success = false,
                    Error = run.ErrorMessage
                };
            }

            // 5. Discover targets
            var targets = await handler.DiscoverTargetsAsync(source, context, ct);
            run.SetDiscoveredItems(targets.Count);
            await _runRepository.UpdateAsync(run, ct);

            _logger.LogInformation(
                "Discovered {Count} targets for source {SourceId}",
                targets.Count, sourceId);

            if (targets.Count == 0)
            {
                run.Complete();
                await _runRepository.UpdateAsync(run, ct);
                source.RecordSuccessfulRun(DateTimeOffset.UtcNow);
                await _sourceRepository.UpdateAsync(source, ct);

                return new TriggerDispatchResult
                {
                    Run = run,
                    Targets = targets,
                    JobsQueued = 0
                };
            }

            // 6. Queue scan jobs
            var jobsQueued = 0;
            foreach (var target in targets)
            {
                try
                {
                    var jobId = await _scanJobQueue.EnqueueAsync(new ScanJobRequest
                    {
                        SourceId = sourceId,
                        RunId = run.RunId,
                        TenantId = source.TenantId,
                        Reference = target.Reference,
                        Digest = target.Digest,
                        CorrelationId = context.CorrelationId,
                        Metadata = target.Metadata
                    }, ct);

                    run.RecordItemSuccess(jobId);
                    jobsQueued++;
                }
                catch (Exception ex)
                {
                    _logger.LogWarning(ex, "Failed to queue scan for target {Reference}", target.Reference);
                    run.RecordItemFailure();
                }
            }

            // 7. Complete or fail based on results
            if (run.ItemsFailed == run.ItemsDiscovered)
            {
                run.Fail("All targets failed to queue");
                source.RecordFailedRun(DateTimeOffset.UtcNow, run.ErrorMessage!);
            }
            else
            {
                run.Complete();
                source.RecordSuccessfulRun(DateTimeOffset.UtcNow);
            }

            await _runRepository.UpdateAsync(run, ct);
            await _sourceRepository.UpdateAsync(source, ct);

            return new TriggerDispatchResult
            {
                Run = run,
                Targets = targets,
                JobsQueued = jobsQueued
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Dispatch failed for source {SourceId}", sourceId);

            run.Fail(ex.Message);
            await _runRepository.UpdateAsync(run, ct);

            source.RecordFailedRun(DateTimeOffset.UtcNow, ex.Message);
            await _sourceRepository.UpdateAsync(source, ct);

            return new TriggerDispatchResult
            {
                Run = run,
                Success = false,
                Error = ex.Message
            };
        }
    }

    public async Task<int> ProcessScheduledSourcesAsync(CancellationToken ct = default)
    {
        _logger.LogDebug("Processing scheduled sources");

        var dueSources = await _sourceRepository.GetDueForScheduledRunAsync(ct);
        var processed = 0;

        foreach (var source in dueSources)
        {
            try
            {
                var context = TriggerContext.Scheduled(source.CronSchedule!);
                await DispatchAsync(source.SourceId, context, ct);
                processed++;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Failed to process scheduled source {SourceId}", source.SourceId);
            }
        }

        _logger.LogInformation("Processed {Count} scheduled sources", processed);
        return processed;
    }

    public async Task<TriggerDispatchResult> RetryAsync(
        Guid sourceId,
        Guid originalRunId,
        CancellationToken ct = default)
    {
        var originalRun = await _runRepository.GetByIdAsync(originalRunId, ct);
        if (originalRun == null)
        {
            throw new KeyNotFoundException($"Run {originalRunId} not found");
        }

        var context = new TriggerContext
        {
            Trigger = originalRun.Trigger,
            TriggerDetails = $"Retry of run {originalRunId}",
            CorrelationId = Guid.NewGuid().ToString("N"),
            Metadata = new() { ["originalRunId"] = originalRunId.ToString() }
        };

        return await DispatchAsync(sourceId, context, ct);
    }

    private ISourceTypeHandler? GetHandler(SbomSourceType sourceType)
    {
        return _handlers.FirstOrDefault(h => h.SourceType == sourceType);
    }

    private static (bool Success, string? Error) CanTrigger(SbomSource source, TriggerContext context)
    {
        if (source.Status == SbomSourceStatus.Disabled)
        {
            return (false, "Source is disabled");
        }

        if (source.Status == SbomSourceStatus.Pending)
        {
            return (false, "Source has not been activated");
        }

        if (source.Paused)
        {
            return (false, $"Source is paused: {source.PauseReason}");
        }

        if (source.Status == SbomSourceStatus.Error)
        {
            // Allow manual triggers for error state to allow recovery
            if (context.Trigger != SbomSourceRunTrigger.Manual)
            {
                return (false, "Source is in error state. Use manual trigger to recover.");
            }
        }

        if (source.IsRateLimited())
        {
            return (false, "Source is rate limited");
        }

        return (true, null);
    }
}

/// <summary>
/// Interface for the scan job queue.
/// </summary>
public interface IScanJobQueue
{
    /// <summary>
    /// Enqueue a scan job.
    /// </summary>
    Task<Guid> EnqueueAsync(ScanJobRequest request, CancellationToken ct = default);
}

/// <summary>
/// Request to create a scan job.
/// </summary>
public sealed record ScanJobRequest
{
    public required Guid SourceId { get; init; }
    public required Guid RunId { get; init; }
    public required string TenantId { get; init; }
    public required string Reference { get; init; }
    public string? Digest { get; init; }
    public required string CorrelationId { get; init; }
    public Dictionary<string, string> Metadata { get; init; } = [];
}
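A minimal fake `IScanJobQueue` sketch for unit-testing the dispatcher without a real queue backend; everything here is illustrative:

```csharp
public sealed class RecordingScanJobQueue : IScanJobQueue
{
    public List<ScanJobRequest> Enqueued { get; } = new();

    public Task<Guid> EnqueueAsync(ScanJobRequest request, CancellationToken ct = default)
    {
        Enqueued.Add(request);                  // capture for assertions
        return Task.FromResult(Guid.NewGuid()); // pretend job id
    }
}
```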
@@ -0,0 +1,124 @@
using System.Text.Json;
using StellaOps.Scanner.Sources.Domain;

namespace StellaOps.Scanner.Sources.Triggers;

/// <summary>
/// Context information for a source trigger.
/// </summary>
public sealed record TriggerContext
{
    /// <summary>Type of trigger that initiated this run.</summary>
    public required SbomSourceRunTrigger Trigger { get; init; }

    /// <summary>Details about the trigger (e.g., webhook event type, cron expression).</summary>
    public string? TriggerDetails { get; init; }

    /// <summary>Correlation ID for distributed tracing.</summary>
    public required string CorrelationId { get; init; }

    /// <summary>Webhook payload for webhook-triggered runs.</summary>
    public JsonDocument? WebhookPayload { get; init; }

    /// <summary>Additional metadata from the trigger source.</summary>
    public Dictionary<string, string> Metadata { get; init; } = [];

    /// <summary>Creates a context for a manual trigger.</summary>
    public static TriggerContext Manual(string triggeredBy, string? correlationId = null) => new()
    {
        Trigger = SbomSourceRunTrigger.Manual,
        TriggerDetails = $"Triggered by {triggeredBy}",
        CorrelationId = correlationId ?? Guid.NewGuid().ToString("N"),
        Metadata = new() { ["triggeredBy"] = triggeredBy }
    };

    /// <summary>Creates a context for a scheduled trigger.</summary>
    public static TriggerContext Scheduled(string cronExpression, string? correlationId = null) => new()
    {
        Trigger = SbomSourceRunTrigger.Scheduled,
        TriggerDetails = $"Cron: {cronExpression}",
        CorrelationId = correlationId ?? Guid.NewGuid().ToString("N")
    };

    /// <summary>Creates a context for a webhook trigger.</summary>
    public static TriggerContext Webhook(
        string eventDetails,
        JsonDocument payload,
        string? correlationId = null) => new()
    {
        Trigger = SbomSourceRunTrigger.Webhook,
        TriggerDetails = eventDetails,
        CorrelationId = correlationId ?? Guid.NewGuid().ToString("N"),
        WebhookPayload = payload
    };

    /// <summary>Creates a context for a push event trigger (registry/git push via webhook).</summary>
    public static TriggerContext Push(
        string eventDetails,
        JsonDocument payload,
        string? correlationId = null) => new()
    {
        Trigger = SbomSourceRunTrigger.Webhook,
        TriggerDetails = $"Push: {eventDetails}",
        CorrelationId = correlationId ?? Guid.NewGuid().ToString("N"),
        WebhookPayload = payload
    };
}

/// <summary>
/// Target to be scanned, discovered by a source handler.
/// </summary>
public sealed record ScanTarget
{
    /// <summary>Reference to the target (image ref, repo URL, etc.).</summary>
    public required string Reference { get; init; }

    /// <summary>Optional pinned digest for container images.</summary>
    public string? Digest { get; init; }

    /// <summary>Metadata about the target.</summary>
    public Dictionary<string, string> Metadata { get; init; } = [];

    /// <summary>Priority of this target (higher = scan first).</summary>
    public int Priority { get; init; } = 0;

    /// <summary>Creates a container image target.</summary>
    public static ScanTarget Image(string reference, string? digest = null) => new()
    {
        Reference = reference,
        Digest = digest
    };

    /// <summary>Creates a git repository target.</summary>
    public static ScanTarget Repository(string repoUrl, string branch, string? commitSha = null) => new()
    {
        Reference = repoUrl,
        Metadata = new()
        {
            ["branch"] = branch,
            ["commitSha"] = commitSha ?? "",
            ["ref"] = $"refs/heads/{branch}"
        }
    };
}

/// <summary>
/// Result of dispatching a trigger.
/// </summary>
public sealed record TriggerDispatchResult
{
    /// <summary>The run created for this trigger.</summary>
    public required SbomSourceRun Run { get; init; }

    /// <summary>Targets discovered and queued for scanning.</summary>
    public IReadOnlyList<ScanTarget> Targets { get; init; } = [];

    /// <summary>Number of scan jobs created.</summary>
    public int JobsQueued { get; init; }

    /// <summary>Whether the dispatch was successful.</summary>
    public bool Success { get; init; } = true;

    /// <summary>Error message if dispatch failed.</summary>
    public string? Error { get; init; }
}
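A minimal sketch of building contexts with the factory methods above; the payload is a toy Harbor-style event:

```csharp
using var payload = JsonDocument.Parse("""{"type":"PUSH_ARTIFACT","event_data":{}}""");

var manual    = TriggerContext.Manual("alice@example.com");
var scheduled = TriggerContext.Scheduled("0 */6 * * *");
var webhook   = TriggerContext.Webhook("harbor PUSH_ARTIFACT", payload);
```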
@@ -0,0 +1,293 @@
-- ============================================================================
-- SCANNER STORAGE - SBOM SOURCES SCHEMA
-- ============================================================================
-- Migration: 020_sbom_sources.sql
-- Description: Creates tables for managing SBOM ingestion sources
--              Supports: Zastava (registry webhooks), Docker (image scanning),
--              CLI (external submissions), Git (source code scanning)
-- ============================================================================

-- ============================================================================
-- ENUMS
-- ============================================================================

DO $$ BEGIN
    CREATE TYPE scanner.sbom_source_type AS ENUM (
        'zastava',  -- Registry webhook (Docker Hub, Harbor, ECR, etc.)
        'docker',   -- Direct image scanning
        'cli',      -- External SBOM submissions
        'git'       -- Source code scanning
    );
EXCEPTION WHEN duplicate_object THEN NULL;
END $$;

DO $$ BEGIN
    CREATE TYPE scanner.sbom_source_status AS ENUM (
        'draft',    -- Initial state, not yet activated
        'active',   -- Ready to process
        'disabled', -- Administratively disabled
        'error'     -- In error state (consecutive failures)
    );
EXCEPTION WHEN duplicate_object THEN NULL;
END $$;

DO $$ BEGIN
    CREATE TYPE scanner.sbom_source_run_status AS ENUM (
        'pending',   -- Queued
        'running',   -- In progress
        'succeeded', -- Completed successfully
        'failed',    -- Completed with errors
        'cancelled'  -- Cancelled by user
    );
EXCEPTION WHEN duplicate_object THEN NULL;
END $$;

DO $$ BEGIN
    CREATE TYPE scanner.sbom_source_run_trigger AS ENUM (
        'manual',    -- User-triggered
        'scheduled', -- Cron-triggered
        'webhook',   -- External webhook event
        'push'       -- Registry push event
    );
EXCEPTION WHEN duplicate_object THEN NULL;
END $$;

-- ============================================================================
-- SBOM SOURCES TABLE
-- ============================================================================

CREATE TABLE IF NOT EXISTS scanner.sbom_sources (
    -- Identity
    source_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    tenant_id TEXT NOT NULL,
    name TEXT NOT NULL,
    description TEXT,

    -- Type and configuration
    source_type scanner.sbom_source_type NOT NULL,
    configuration JSONB NOT NULL,

    -- Status
    status scanner.sbom_source_status NOT NULL DEFAULT 'draft',

    -- Authentication
    auth_ref TEXT, -- Reference to credentials in vault (e.g., "vault://secrets/registry-auth")

    -- Webhook (for Zastava type)
    webhook_secret TEXT,
    webhook_endpoint TEXT,

    -- Scheduling (for scheduled sources)
    cron_schedule TEXT,
    cron_timezone TEXT DEFAULT 'UTC',
    next_scheduled_run TIMESTAMPTZ,

    -- Run tracking
    last_run_at TIMESTAMPTZ,
    last_run_status scanner.sbom_source_run_status,
    last_run_error TEXT,
    consecutive_failures INT NOT NULL DEFAULT 0,

    -- Pause state
    paused BOOLEAN NOT NULL DEFAULT FALSE,
    pause_reason TEXT,
    pause_ticket TEXT,
    paused_at TIMESTAMPTZ,
    paused_by TEXT,

    -- Rate limiting
    max_scans_per_hour INT,
    last_rate_limit_reset TIMESTAMPTZ,
    scans_in_current_hour INT NOT NULL DEFAULT 0,

    -- Metadata
    tags JSONB NOT NULL DEFAULT '[]',
    metadata JSONB NOT NULL DEFAULT '{}',

    -- Audit
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    created_by TEXT NOT NULL,
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_by TEXT NOT NULL,

    -- Constraints
    CONSTRAINT uq_sbom_sources_tenant_name UNIQUE (tenant_id, name)
);

-- Indexes for common queries
CREATE INDEX IF NOT EXISTS ix_sbom_sources_tenant
    ON scanner.sbom_sources (tenant_id);

CREATE INDEX IF NOT EXISTS ix_sbom_sources_type
    ON scanner.sbom_sources (source_type);

CREATE INDEX IF NOT EXISTS ix_sbom_sources_status
    ON scanner.sbom_sources (status);

CREATE INDEX IF NOT EXISTS ix_sbom_sources_next_scheduled
    ON scanner.sbom_sources (next_scheduled_run)
    WHERE next_scheduled_run IS NOT NULL AND status = 'active' AND NOT paused;

CREATE INDEX IF NOT EXISTS ix_sbom_sources_webhook_endpoint
    ON scanner.sbom_sources (webhook_endpoint)
    WHERE webhook_endpoint IS NOT NULL;

CREATE INDEX IF NOT EXISTS ix_sbom_sources_tags
    ON scanner.sbom_sources USING GIN (tags);

-- pg_trgm must be installed before the trigram index below can be created.
CREATE EXTENSION IF NOT EXISTS pg_trgm;

CREATE INDEX IF NOT EXISTS ix_sbom_sources_name_search
    ON scanner.sbom_sources USING gin (name gin_trgm_ops);

-- ============================================================================
-- SBOM SOURCE RUNS TABLE
-- ============================================================================

CREATE TABLE IF NOT EXISTS scanner.sbom_source_runs (
    -- Identity
    run_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    source_id UUID NOT NULL REFERENCES scanner.sbom_sources(source_id) ON DELETE CASCADE,
    tenant_id TEXT NOT NULL,

    -- Trigger info
    trigger scanner.sbom_source_run_trigger NOT NULL,
    trigger_details TEXT,
    correlation_id TEXT NOT NULL,

    -- Status
    status scanner.sbom_source_run_status NOT NULL DEFAULT 'pending',

    -- Timing
    started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    completed_at TIMESTAMPTZ,
    duration_ms BIGINT NOT NULL DEFAULT 0,

    -- Progress counters
    items_discovered INT NOT NULL DEFAULT 0,
    items_scanned INT NOT NULL DEFAULT 0,
    items_succeeded INT NOT NULL DEFAULT 0,
    items_failed INT NOT NULL DEFAULT 0,
    items_skipped INT NOT NULL DEFAULT 0,

    -- Results
    scan_job_ids JSONB NOT NULL DEFAULT '[]',
    error_message TEXT,
    error_details JSONB,

    -- Metadata
    metadata JSONB NOT NULL DEFAULT '{}'
);

-- Indexes for run queries
CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_source
    ON scanner.sbom_source_runs (source_id);

CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_tenant
    ON scanner.sbom_source_runs (tenant_id);

CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_status
    ON scanner.sbom_source_runs (status);

CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_started
    ON scanner.sbom_source_runs (started_at DESC);

CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_correlation
    ON scanner.sbom_source_runs (correlation_id);

-- Partial index for active runs
CREATE INDEX IF NOT EXISTS ix_sbom_source_runs_active
    ON scanner.sbom_source_runs (source_id, started_at DESC)
    WHERE status IN ('pending', 'running');

-- ============================================================================
-- FUNCTIONS
-- ============================================================================

-- Function to update source statistics after a run completes
CREATE OR REPLACE FUNCTION scanner.update_source_after_run()
RETURNS TRIGGER AS $$
BEGIN
    IF NEW.status IN ('succeeded', 'failed', 'cancelled') AND
       (OLD.status IS NULL OR OLD.status IN ('pending', 'running')) THEN

        UPDATE scanner.sbom_sources SET
            last_run_at = NEW.completed_at,
            last_run_status = NEW.status,
            last_run_error = CASE WHEN NEW.status = 'failed' THEN NEW.error_message ELSE NULL END,
            consecutive_failures = CASE
                WHEN NEW.status = 'succeeded' THEN 0
                WHEN NEW.status = 'failed' THEN consecutive_failures + 1
                ELSE consecutive_failures
            END,
            status = CASE
                WHEN NEW.status = 'failed' AND consecutive_failures >= 4 THEN 'error'::scanner.sbom_source_status
                ELSE status
            END,
            updated_at = NOW()
        WHERE source_id = NEW.source_id;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Trigger to update source after run completion
DROP TRIGGER IF EXISTS trg_update_source_after_run ON scanner.sbom_source_runs;
CREATE TRIGGER trg_update_source_after_run
    AFTER UPDATE ON scanner.sbom_source_runs
    FOR EACH ROW
    EXECUTE FUNCTION scanner.update_source_after_run();

-- Function to reset rate limit counters
CREATE OR REPLACE FUNCTION scanner.reset_rate_limit_if_needed(p_source_id UUID)
RETURNS VOID AS $$
BEGIN
    UPDATE scanner.sbom_sources SET
        scans_in_current_hour = 0,
        last_rate_limit_reset = NOW()
    WHERE source_id = p_source_id
      AND (last_rate_limit_reset IS NULL
           OR last_rate_limit_reset < NOW() - INTERVAL '1 hour');
END;
$$ LANGUAGE plpgsql;

-- Function to calculate next scheduled run
CREATE OR REPLACE FUNCTION scanner.calculate_next_scheduled_run(
    p_cron_schedule TEXT,
    p_timezone TEXT DEFAULT 'UTC'
)
RETURNS TIMESTAMPTZ AS $$
DECLARE
    v_next TIMESTAMPTZ;
BEGIN
    -- Note: This is a placeholder. In practice, cron parsing is done in application code.
    -- The application should call UPDATE to set next_scheduled_run after calculating it.
    RETURN NULL;
END;
$$ LANGUAGE plpgsql IMMUTABLE;

-- ============================================================================
-- COMMENTS
-- ============================================================================

COMMENT ON TABLE scanner.sbom_sources IS
    'Registry of SBOM ingestion sources (Zastava webhooks, Docker scanning, CLI submissions, Git repos)';

COMMENT ON TABLE scanner.sbom_source_runs IS
    'Execution history for SBOM source scan runs';

COMMENT ON COLUMN scanner.sbom_sources.auth_ref IS
    'Reference to credentials in external vault (e.g., vault://secrets/registry-auth)';

COMMENT ON COLUMN scanner.sbom_sources.configuration IS
    'Type-specific configuration as JSON (ZastavaSourceConfig, DockerSourceConfig, etc.)';

COMMENT ON COLUMN scanner.sbom_source_runs.correlation_id IS
    'Correlation ID for tracing across services';

COMMENT ON COLUMN scanner.sbom_source_runs.scan_job_ids IS
    'Array of scan job IDs created by this run';
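A minimal sketch of the dispatcher-side query that `ix_sbom_sources_next_scheduled` is shaped for; the column list is abbreviated:

```sql
SELECT source_id, name, cron_schedule
FROM scanner.sbom_sources
WHERE next_scheduled_run <= NOW()
  AND status = 'active'
  AND NOT paused
ORDER BY next_scheduled_run
LIMIT 100;
```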
@@ -0,0 +1,380 @@
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Sources.Configuration;
using StellaOps.Scanner.Sources.Domain;
using Xunit;

namespace StellaOps.Scanner.Sources.Tests.Configuration;

public class SourceConfigValidatorTests
{
    private readonly SourceConfigValidator _validator = new(NullLogger<SourceConfigValidator>.Instance);

    #region Zastava Configuration Tests

    [Fact]
    public void Validate_ValidZastavaConfig_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryType": "Harbor",
                "registryUrl": "https://harbor.example.com",
                "filters": {
                    "repositoryPatterns": ["library/*"]
                }
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Zastava, config);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Errors.Should().BeEmpty();
    }

    [Fact]
    public void Validate_ZastavaConfig_MissingRegistryType_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryUrl": "https://harbor.example.com"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Zastava, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("registryType"));
    }

    [Fact]
    public void Validate_ZastavaConfig_InvalidRegistryType_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryType": "InvalidRegistry",
                "registryUrl": "https://harbor.example.com"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Zastava, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("Invalid registryType"));
    }

    [Fact]
    public void Validate_ZastavaConfig_MissingRegistryUrl_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryType": "Harbor"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Zastava, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("registryUrl"));
    }

    [Fact]
    public void Validate_ZastavaConfig_NoFilters_ReturnsWarning()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryType": "Harbor",
                "registryUrl": "https://harbor.example.com"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Zastava, config);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Warnings.Should().Contain(w => w.Contains("No filters"));
    }

    #endregion

    #region Docker Configuration Tests

    [Fact]
    public void Validate_ValidDockerConfig_WithImages_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryUrl": "https://registry.example.com",
                "images": [
                    {
                        "repository": "library/nginx",
                        "tag": "latest"
                    }
                ]
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Docker, config);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Validate_ValidDockerConfig_WithDiscovery_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryUrl": "https://registry.example.com",
                "discoveryOptions": {
                    "repositoryPattern": "library/*",
                    "maxTagsPerRepo": 5
                }
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Docker, config);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Validate_DockerConfig_NoImagesOrDiscovery_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "registryUrl": "https://registry.example.com"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Docker, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("images") || e.Contains("discoveryOptions"));
    }

    [Fact]
    public void Validate_DockerConfig_ImageMissingRepository_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "images": [
                    {
                        "tag": "latest"
                    }
                ]
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Docker, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("repository"));
    }

    #endregion

    #region CLI Configuration Tests

    [Fact]
    public void Validate_ValidCliConfig_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "acceptedFormats": ["CycloneDX", "SPDX"],
                "validationRules": {
                    "requireSignature": false,
                    "maxFileSizeBytes": 10485760
                }
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Cli, config);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Validate_CliConfig_InvalidFormat_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "acceptedFormats": ["InvalidFormat"]
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Cli, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("Invalid SBOM format"));
    }

    [Fact]
    public void Validate_CliConfig_Empty_ReturnsWarning()
    {
        // Arrange
        var config = JsonDocument.Parse("{}");

        // Act
        var result = _validator.Validate(SbomSourceType.Cli, config);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Warnings.Should().Contain(w => w.Contains("validation rules"));
    }

    #endregion

    #region Git Configuration Tests

    [Fact]
    public void Validate_ValidGitConfig_HttpsUrl_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "repositoryUrl": "https://github.com/example/repo",
                "provider": "GitHub",
                "authMethod": "Token",
                "branchConfig": {
                    "defaultBranch": "main"
                }
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Git, config);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Validate_ValidGitConfig_SshUrl_ReturnsSuccess()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "repositoryUrl": "git@github.com:example/repo.git",
                "provider": "GitHub",
                "authMethod": "SshKey"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Git, config);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact]
    public void Validate_GitConfig_MissingRepositoryUrl_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "provider": "GitHub"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Git, config);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("repositoryUrl"));
    }

    [Fact]
    public void Validate_GitConfig_InvalidProvider_ReturnsFailure()
    {
        // Arrange
        var config = JsonDocument.Parse("""
            {
                "repositoryUrl": "https://github.com/example/repo",
                "provider": "InvalidProvider"
            }
            """);

        // Act
        var result = _validator.Validate(SbomSourceType.Git, config);

        // Assert
        result.IsValid.Should().BeFalse();
result.Errors.Should().Contain(e => e.Contains("Invalid provider"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_GitConfig_NoBranchConfig_ReturnsWarning()
|
||||
{
|
||||
// Arrange
|
||||
var config = JsonDocument.Parse("""
|
||||
{
|
||||
"repositoryUrl": "https://github.com/example/repo",
|
||||
"provider": "GitHub"
|
||||
}
|
||||
""");
|
||||
|
||||
// Act
|
||||
var result = _validator.Validate(SbomSourceType.Git, config);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Warnings.Should().Contain(w => w.Contains("branch configuration"));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Schema Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(SbomSourceType.Zastava)]
|
||||
[InlineData(SbomSourceType.Docker)]
|
||||
[InlineData(SbomSourceType.Cli)]
|
||||
[InlineData(SbomSourceType.Git)]
|
||||
public void GetConfigurationSchema_ReturnsValidJsonSchema(SbomSourceType sourceType)
|
||||
{
|
||||
// Act
|
||||
var schema = _validator.GetConfigurationSchema(sourceType);
|
||||
|
||||
// Assert
|
||||
schema.Should().NotBeNullOrEmpty();
|
||||
var parsed = JsonDocument.Parse(schema);
|
||||
parsed.RootElement.GetProperty("$schema").GetString()
|
||||
.Should().Contain("json-schema.org");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,222 @@
using FluentAssertions;
using StellaOps.Scanner.Sources.Domain;
using Xunit;

namespace StellaOps.Scanner.Sources.Tests.Domain;

public class SbomSourceRunTests
{
    [Fact]
    public void Create_WithValidInputs_CreatesRunInPendingStatus()
    {
        // Arrange
        var sourceId = Guid.NewGuid();
        var correlationId = Guid.NewGuid().ToString("N");

        // Act
        var run = SbomSourceRun.Create(
            sourceId: sourceId,
            tenantId: "tenant-1",
            trigger: SbomSourceRunTrigger.Manual,
            correlationId: correlationId,
            triggerDetails: "Triggered by user");

        // Assert
        run.RunId.Should().NotBeEmpty();
        run.SourceId.Should().Be(sourceId);
        run.TenantId.Should().Be("tenant-1");
        run.Trigger.Should().Be(SbomSourceRunTrigger.Manual);
        run.CorrelationId.Should().Be(correlationId);
        run.TriggerDetails.Should().Be("Triggered by user");
        run.Status.Should().Be(SbomSourceRunStatus.Pending);
        run.ItemsDiscovered.Should().Be(0);
        run.ItemsScanned.Should().Be(0);
    }

    [Fact]
    public void Start_SetsStatusToRunning()
    {
        // Arrange
        var run = CreateTestRun();

        // Act
        run.Start();

        // Assert
        run.Status.Should().Be(SbomSourceRunStatus.Running);
    }

    [Fact]
    public void SetDiscoveredItems_UpdatesDiscoveryCount()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();

        // Act
        run.SetDiscoveredItems(10);

        // Assert
        run.ItemsDiscovered.Should().Be(10);
    }

    [Fact]
    public void RecordItemSuccess_IncrementsCounts()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();
        run.SetDiscoveredItems(5);

        // Act
        var scanJobId = Guid.NewGuid();
        run.RecordItemSuccess(scanJobId);
        run.RecordItemSuccess(Guid.NewGuid());

        // Assert
        run.ItemsScanned.Should().Be(2);
        run.ItemsSucceeded.Should().Be(2);
        run.ScanJobIds.Should().Contain(scanJobId);
    }

    [Fact]
    public void RecordItemFailure_IncrementsCounts()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();
        run.SetDiscoveredItems(5);

        // Act
        run.RecordItemFailure();
        run.RecordItemFailure();

        // Assert
        run.ItemsScanned.Should().Be(2);
        run.ItemsFailed.Should().Be(2);
        run.ItemsSucceeded.Should().Be(0);
    }

    [Fact]
    public void RecordItemSkipped_IncrementsCounts()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();
        run.SetDiscoveredItems(5);

        // Act
        run.RecordItemSkipped();

        // Assert
        run.ItemsScanned.Should().Be(1);
        run.ItemsSkipped.Should().Be(1);
    }

    [Fact]
    public void Complete_SetsSuccessStatusAndDuration()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();
        run.SetDiscoveredItems(3);
        run.RecordItemSuccess(Guid.NewGuid());
        run.RecordItemSuccess(Guid.NewGuid());
        run.RecordItemSuccess(Guid.NewGuid());

        // Act
        run.Complete();

        // Assert
        run.Status.Should().Be(SbomSourceRunStatus.Succeeded);
        run.CompletedAt.Should().NotBeNull();
        run.DurationMs.Should().BeGreaterOrEqualTo(0);
    }

    [Fact]
    public void Fail_SetsFailedStatusAndErrorMessage()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();

        // Act
        run.Fail("Connection timeout", new { retries = 3 });

        // Assert
        run.Status.Should().Be(SbomSourceRunStatus.Failed);
        run.ErrorMessage.Should().Be("Connection timeout");
        run.ErrorDetails.Should().NotBeNull();
        run.CompletedAt.Should().NotBeNull();
    }

    [Fact]
    public void Cancel_SetsCancelledStatus()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();

        // Act
        run.Cancel();

        // Assert
        run.Status.Should().Be(SbomSourceRunStatus.Cancelled);
        run.CompletedAt.Should().NotBeNull();
    }

    [Fact]
    public void MixedResults_TracksAllCountsCorrectly()
    {
        // Arrange
        var run = CreateTestRun();
        run.Start();
        run.SetDiscoveredItems(10);

        // Act
        run.RecordItemSuccess(Guid.NewGuid()); // 1 success
        run.RecordItemSuccess(Guid.NewGuid()); // 2 successes
        run.RecordItemFailure(); // 1 failure
        run.RecordItemSkipped(); // 1 skipped
        run.RecordItemSuccess(Guid.NewGuid()); // 3 successes
        run.RecordItemFailure(); // 2 failures

        // Assert
        run.ItemsScanned.Should().Be(6);
        run.ItemsSucceeded.Should().Be(3);
        run.ItemsFailed.Should().Be(2);
        run.ItemsSkipped.Should().Be(1);
        run.ScanJobIds.Should().HaveCount(3);
    }

    [Theory]
    [InlineData(SbomSourceRunTrigger.Manual, "Manual trigger")]
    [InlineData(SbomSourceRunTrigger.Scheduled, "Cron: 0 * * * *")]
    [InlineData(SbomSourceRunTrigger.Webhook, "Harbor push event")]
    [InlineData(SbomSourceRunTrigger.Push, "Registry push event")]
    public void Create_WithDifferentTriggers_StoresTriggerInfo(
        SbomSourceRunTrigger trigger,
        string details)
    {
        // Arrange & Act
        var run = SbomSourceRun.Create(
            sourceId: Guid.NewGuid(),
            tenantId: "tenant-1",
            trigger: trigger,
            correlationId: Guid.NewGuid().ToString("N"),
            triggerDetails: details);

        // Assert
        run.Trigger.Should().Be(trigger);
        run.TriggerDetails.Should().Be(details);
    }

    private static SbomSourceRun CreateTestRun()
    {
        return SbomSourceRun.Create(
            sourceId: Guid.NewGuid(),
            tenantId: "tenant-1",
            trigger: SbomSourceRunTrigger.Manual,
            correlationId: Guid.NewGuid().ToString("N"));
    }
}
@@ -0,0 +1,232 @@
using System.Text.Json;
using FluentAssertions;
using StellaOps.Scanner.Sources.Domain;
using Xunit;

namespace StellaOps.Scanner.Sources.Tests.Domain;

public class SbomSourceTests
{
    private static readonly JsonDocument SampleConfig = JsonDocument.Parse("""
        {
            "registryType": "Harbor",
            "registryUrl": "https://harbor.example.com"
        }
        """);

    [Fact]
    public void Create_WithValidInputs_CreatesSourceInDraftStatus()
    {
        // Arrange & Act
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Zastava,
            configuration: SampleConfig,
            createdBy: "user-1");

        // Assert
        source.SourceId.Should().NotBeEmpty();
        source.TenantId.Should().Be("tenant-1");
        source.Name.Should().Be("test-source");
        source.SourceType.Should().Be(SbomSourceType.Zastava);
        source.Status.Should().Be(SbomSourceStatus.Draft);
        source.CreatedBy.Should().Be("user-1");
        source.Paused.Should().BeFalse();
        source.ConsecutiveFailures.Should().Be(0);
    }

    [Fact]
    public void Create_WithCronSchedule_CalculatesNextScheduledRun()
    {
        // Arrange & Act
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "scheduled-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1",
            cronSchedule: "0 * * * *"); // Every hour

        // Assert
        source.CronSchedule.Should().Be("0 * * * *");
        source.NextScheduledRun.Should().NotBeNull();
        source.NextScheduledRun.Should().BeAfter(DateTimeOffset.UtcNow);
    }

    [Fact]
    public void Create_WithZastavaType_GeneratesWebhookEndpointAndSecret()
    {
        // Arrange & Act
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "webhook-source",
            sourceType: SbomSourceType.Zastava,
            configuration: SampleConfig,
            createdBy: "user-1");

        // Assert
        source.WebhookEndpoint.Should().NotBeNullOrEmpty();
        source.WebhookSecret.Should().NotBeNullOrEmpty();
        source.WebhookSecret!.Length.Should().BeGreaterOrEqualTo(32);
    }

    [Fact]
    public void Activate_FromDraft_ChangesStatusToActive()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");

        // Act
        source.Activate("activator");

        // Assert
        source.Status.Should().Be(SbomSourceStatus.Active);
        source.UpdatedBy.Should().Be("activator");
    }

    [Fact]
    public void Pause_WhenActive_PausesSource()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");
        source.Activate("activator");

        // Act
        source.Pause("Maintenance window", "TICKET-123", "operator");

        // Assert
        source.Paused.Should().BeTrue();
        source.PauseReason.Should().Be("Maintenance window");
        source.PauseTicket.Should().Be("TICKET-123");
        source.PausedAt.Should().NotBeNull();
    }

    [Fact]
    public void Resume_WhenPaused_UnpausesSource()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");
        source.Activate("activator");
        source.Pause("Maintenance", null, "operator");

        // Act
        source.Resume("operator");

        // Assert
        source.Paused.Should().BeFalse();
        source.PauseReason.Should().BeNull();
        source.PausedAt.Should().BeNull();
    }

    [Fact]
    public void RecordSuccessfulRun_ResetsConsecutiveFailures()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");
        source.Activate("activator");

        // Simulate some failures
        source.RecordFailedRun("Error 1");
        source.RecordFailedRun("Error 2");
        source.ConsecutiveFailures.Should().Be(2);

        // Act
        source.RecordSuccessfulRun();

        // Assert
        source.ConsecutiveFailures.Should().Be(0);
        source.LastRunStatus.Should().Be(SbomSourceRunStatus.Succeeded);
        source.LastRunError.Should().BeNull();
    }

    [Fact]
    public void RecordFailedRun_MultipleTimes_MovesToErrorStatus()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");
        source.Activate("activator");

        // Act - fail 5 times (threshold is 5)
        for (var i = 0; i < 5; i++)
        {
            source.RecordFailedRun($"Error {i + 1}");
        }

        // Assert
        source.Status.Should().Be(SbomSourceStatus.Error);
        source.ConsecutiveFailures.Should().Be(5);
    }

    [Fact]
    public void IsRateLimited_WhenUnderLimit_ReturnsFalse()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");
        source.MaxScansPerHour = 10;
        source.Activate("activator");

        // Act
        var isLimited = source.IsRateLimited();

        // Assert
        isLimited.Should().BeFalse();
    }

    [Fact]
    public void UpdateConfiguration_ChangesConfigAndUpdatesTimestamp()
    {
        // Arrange
        var source = SbomSource.Create(
            tenantId: "tenant-1",
            name: "test-source",
            sourceType: SbomSourceType.Docker,
            configuration: SampleConfig,
            createdBy: "user-1");

        var newConfig = JsonDocument.Parse("""
            {
                "registryType": "DockerHub",
                "registryUrl": "https://registry-1.docker.io"
            }
            """);

        // Act
        source.UpdateConfiguration(newConfig, "updater");

        // Assert
        source.Configuration.RootElement.GetProperty("registryType").GetString()
            .Should().Be("DockerHub");
        source.UpdatedBy.Should().Be("updater");
    }
}
@@ -0,0 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Sources/StellaOps.Scanner.Sources.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />
  </ItemGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <PackageReference Include="xunit.runner.visualstudio" />
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector" />
  </ItemGroup>
</Project>