This commit is contained in:
StellaOps Bot
2026-01-02 11:47:13 +02:00
80 changed files with 15087 additions and 5608 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,678 @@
# Backport Resolver Tiered Evidence - Implementation Design
**Sprint:** SPRINT_20251230_001_BE
**Version:** 1.0
**Last Updated:** 2025-12-30
---
## Table of Contents
1. [Architecture Overview](#architecture-overview)
2. [Component Design](#component-design)
3. [Data Models](#data-models)
4. [Algorithms](#algorithms)
5. [Integration Points](#integration-points)
6. [Security & Compliance](#security--compliance)
---
## 1. Architecture Overview
### 1.1 Current State
```
BackportStatusService
EvalPatchedStatusAsync()
GetRulesAsync() from repository
EvaluateBoundaryRules() [STRING COMPARE ]
EvaluateRangeRules() [RETURNS UNKNOWN ]
Return verdict
Consumes rules from
IFixRuleRepository
(OVAL/CSAF/Changelog rules)
- Only native distro
- No derivative mapping
```
**Problems:**
- String comparison fails for version semantics (epoch, tildes, etc.)
- RangeRule logic not implemented — always returns Unknown
- No cross-distro evidence reuse (AlmaLinux OVAL for RHEL)
- No bug ID → CVE resolution
### 1.2 Target State
```
BackportStatusService
EvalPatchedStatusAsync()
**Tier 1**: FetchRulesWithDerivativeMapping() [NEW]
Query RHEL → try Alma/Rocky if not found
**Tier 2-4**: GetRulesAsync() (existing)
**Tier 5**: EvaluateRangeRules() [FIXED]
Hierarchical resolver with version comparators [NEW]
IReadOnlyDictionary<PackageEcosystem, IVersionComparator>
RPM → RpmVersionComparer (epoch:version-release)
Deb → DebianVersionComparer (epoch:upstream-debian~pre)
Alpine → ApkVersionComparer (X.Y.Z_pN-rN)
Fallback → StringVersionComparer
Rules Bug→CVE mapping
IFixRuleRepository IBugCveMappingService
+ DistroMappings DebianSecurityTracker
+ ChangelogParser RedHatBugzilla (stub)
(with Bug IDs) UbuntuCVETracker
```
---
## 2. Component Design
### 2.1 BackportStatusService (Enhanced)
**Responsibilities:**
- Orchestrate 5-tier evidence hierarchy
- Inject and delegate to version comparators
- Apply derivative distro mapping logic
- Aggregate evidence from multiple tiers
- Return confident verdicts with audit trails
**Key Methods:**
```csharp
public sealed class BackportStatusService
{
private readonly IFixRuleRepository _ruleRepository;
private readonly IReadOnlyDictionary<PackageEcosystem, IVersionComparator> _comparators;
private readonly IBugCveMappingService? _bugMapper; // Optional
// TIER 1: Try derivative OVAL/CSAF
private async ValueTask<IReadOnlyList<IFixRule>> FetchRulesWithDerivativeMapping(
BackportContext context,
PackageInstance package,
CveId cve,
CancellationToken ct);
// TIER 2-4: Existing rule sources (unchanged)
// TIER 5: Evaluate NVD ranges with version comparators
private BackportVerdict EvaluateRangeRules(
CveId cve,
PackageInstance package,
IReadOnlyList<RangeRule> rules);
// Helper: Get comparator for ecosystem
private IVersionComparator GetComparatorForEcosystem(PackageEcosystem ecosystem) =>
_comparators.GetValueOrDefault(ecosystem, StringVersionComparer.Instance);
}
```
**Dependency Injection:**
```csharp
builder.Services.AddSingleton<IBackportStatusService, BackportStatusService>();
builder.Services.AddSingleton<IRpmVersionComparer, RpmVersionComparer>();
builder.Services.AddSingleton<IDebianVersionComparer, DebianVersionComparer>();
builder.Services.AddSingleton<IApkVersionComparer, ApkVersionComparer>();
builder.Services.AddSingleton<IBugCveMappingService, CompositeBugCveMappingService>();
```
---
### 2.2 DistroMappings (New Component)
**File:** src/__Libraries/StellaOps.DistroIntel/DistroDerivative.cs
**Purpose:** Define and query derivative distro relationships (RHEL→Alma/Rocky, etc.)
**Data Model:**
```csharp
public enum DerivativeConfidence
{
High, // ABI-compatible rebuilds (Alma/Rocky → RHEL)
Medium // Modified derivatives (Mint → Ubuntu, Ubuntu → Debian)
}
public sealed record DistroDerivative(
string CanonicalDistro, // "rhel"
string DerivativeDistro, // "almalinux"
int MajorRelease, // 9
DerivativeConfidence Confidence);
public static class DistroMappings
{
public static readonly ImmutableArray<DistroDerivative> Derivatives = [...];
public static IEnumerable<DistroDerivative> FindDerivativesFor(
string distro,
int majorRelease);
public static decimal GetConfidenceMultiplier(DerivativeConfidence conf);
}
```
**Usage Pattern:**
```csharp
// When fetching rules for Rocky 9:
var derivatives = DistroMappings.FindDerivativesFor("rhel", 9);
// Returns: [("rhel", "almalinux", 9, High), ("rhel", "rocky", 9, High)]
foreach (var d in derivatives.OrderByDescending(x => x.Confidence))
{
var derivativeRules = await _repo.GetRulesAsync(ctx with { Distro = d.DerivativeDistro }, ...);
if (derivativeRules.Any())
{
// Apply 0.95 multiplier for High confidence
return derivativeRules.Select(r => r with {
Confidence = r.Confidence * 0.95m
});
}
}
```
---
### 2.3 IBugCveMappingService (New Interface)
**File:** src/__Libraries/StellaOps.BugTracking/IBugCveMappingService.cs
**Purpose:** Resolve distro bug IDs to CVE IDs
**Interface:**
```csharp
public interface IBugCveMappingService
{
ValueTask<IReadOnlyList<CveId>> LookupCvesAsync(
BugId bugId,
CancellationToken ct = default);
}
public sealed record BugId(string Tracker, string Id);
```
**Implementations:**
1. **DebianSecurityTrackerClient**
- Source: https://security-tracker.debian.org/tracker/data/json
- Caching: 1h TTL, in-memory
2. **RedHatBugzillaClient** (stub)
- Requires authentication → cache pre-populated mappings
- Future: integrate with RHBZ API
3. **UbuntuCVETrackerClient**
- Source: https://ubuntu.com/security/cves scraper
- Caching: 1h TTL
4. **CompositeBugCveMappingService**
- Routes to correct implementation based on BugId.Tracker
**Example:**
```csharp
var bugId = new BugId("Debian", "987654");
var cves = await _bugMapper.LookupCvesAsync(bugId);
// Returns: [CVE-2024-1234, CVE-2024-5678]
```
---
### 2.4 ChangelogParser (Enhanced)
**File:** src/Concelier/__Libraries/StellaOps.Concelier.SourceIntel/ChangelogParser.cs
**Changes:**
- Add regex patterns for bug IDs (Debian, RHBZ, Launchpad)
- Extend ChangelogEntry record to include BugIds collection
- Extract both CVE IDs and bug IDs in parallel
**Updated Model:**
```csharp
public sealed record ChangelogEntry(
string Version,
DateTimeOffset Date,
IReadOnlyList<CveId> CveIds,
IReadOnlyList<BugId> BugIds, // NEW
string Description);
```
**Regex Patterns:**
```csharp
[GeneratedRegex(@"CVE-\d{4}-\d{4,}")]
private static partial Regex CvePatternRegex(); // Existing
[GeneratedRegex(@"Closes:\s*#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex DebianBugRegex(); // NEW
[GeneratedRegex(@"(?:RHBZ|rhbz)#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex RhBugzillaRegex(); // NEW
[GeneratedRegex(@"LP:\s*#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex LaunchpadBugRegex(); // NEW
```
---
### 2.5 HunkSigExtractor (Enhanced)
**File:** src/Feedser/StellaOps.Feedser.Core/HunkSigExtractor.cs
**Changes:**
- Extract function signatures from patch context
- Populate PatchHunkSig.AffectedFunctions (currently null)
- Support C/C++, Python, Go function patterns
**Function Extraction Logic:**
```csharp
private static IReadOnlyList<string> ExtractFunctionsFromContext(PatchHunk hunk)
{
var functions = new HashSet<string>();
// C/C++: "static void foo(" or "int bar("
foreach (Match m in CFunctionRegex().Matches(hunk.Context))
functions.Add(m.Groups[1].Value);
// Python: "def foo(" or "class Bar:"
foreach (Match m in PythonFunctionRegex().Matches(hunk.Context))
functions.Add(m.Groups[1].Value);
// Go: "func (r *Receiver) Method("
foreach (Match m in GoFunctionRegex().Matches(hunk.Context))
functions.Add(m.Groups[1].Value);
return functions.ToArray();
}
// Usage:
AffectedFunctions = ExtractFunctionsFromContext(hunk),
```
---
## 3. Data Models
### 3.1 BackportVerdict (Existing, No Changes)
```csharp
public sealed record BackportVerdict(
FixStatus Status, // Fixed | Vulnerable | Unknown
VerdictConfidence Confidence, // High | Medium | Low
RuleType EvidenceSource, // Boundary | Range | Changelog | Patch
EvidencePointer EvidencePointer, // URI, digest, timestamp
string? ConflictReason);
```
### 3.2 RulePriority (Updated Enum)
```csharp
public enum RulePriority
{
// Tier 1: OVAL/CSAF evidence
DistroNativeOval = 100, // Distro's own OVAL/CSAF
DerivativeOvalHigh = 95, // Alma/Rocky for RHEL
DerivativeOvalMedium = 90, // Mint for Ubuntu
// Tier 2: Changelog evidence
ChangelogExplicitCve = 85, // Direct CVE mention
ChangelogBugIdMapped = 75, // Bug ID → CVE mapping
// Tier 3: Source patches
SourcePatchExactMatch = 70, // Exact hunk hash match
SourcePatchFuzzyMatch = 60, // Function name + context match
// Tier 4: Upstream commits
UpstreamCommitExactParity = 55, // 100% hunk parity
UpstreamCommitPartialMatch = 45, // Partial context match
// Tier 5: NVD range heuristic
NvdRangeHeuristic = 20 // Version range check (low confidence)
}
```
### 3.3 EvidencePointer (Existing, Extended)
```csharp
public sealed record EvidencePointer(
string Type, // "OvalAdvisory" | "DebianChangelog" | "NvdCpeRange"
string Uri, // "oval:ALSA-2024-1234" | "deb:curl/changelog#L42"
string SourceDigest, // SHA-256 of artifact
DateTimeOffset FetchedAt);
```
**New URI Schemes:**
- derivative:almalinux→rhel:oval:ALSA-2024-1234 (Tier 1)
- changelog:debian:curl:1.2.3#bug:987654 (Tier 2 with bug ID)
- nvd:cve/CVE-2024-1234/cpe:2.3:a:vendor:product:* (Tier 5)
---
## 4. Algorithms
### 4.1 Hierarchical Evidence Resolver
```pseudo
FUNCTION ResolveFixStatus(cve, package, distro, release):
// TIER 1: Try derivative OVAL/CSAF
rules ← FetchNativeRules(distro, release, package, cve)
IF rules.IsEmpty THEN
derivatives ← DistroMappings.FindDerivativesFor(distro, release)
FOR EACH derivative IN derivatives ORDER BY Confidence DESC:
derivativeRules ← FetchNativeRules(
derivative.DerivativeDistro,
release,
package,
cve)
IF derivativeRules.IsNotEmpty THEN
confidenceMultiplier ← derivative.Confidence == High ? 0.95 : 0.80
rules ← ApplyConfidencePenalty(derivativeRules, confidenceMultiplier)
BREAK // Use first successful derivative
END IF
END FOR
END IF
// TIER 2-4: Existing sources (changelog, patches, commits)
IF rules.IsEmpty THEN
rules ← FetchEvidenceBasedRules(distro, package, cve)
END IF
// TIER 5: NVD range fallback
IF rules.IsEmpty THEN
rules ← FetchNvdRangeRules(cve, package)
END IF
// Evaluate rules with version comparators
RETURN EvaluateRulesWithVersionSemantics(rules, package)
END FUNCTION
```
### 4.2 Version Comparison with Ecosystem-Specific Logic
```pseudo
FUNCTION CompareVersions(v1, v2, ecosystem):
comparator ← GetComparatorForEcosystem(ecosystem)
MATCH ecosystem:
CASE RPM:
// Parse epoch:version-release
// Compare epoch first, then version, then release
// Handle ~ (pre-release) and ^ (post-release)
RETURN RpmVersionComparer.CompareWithProof(v1, v2)
CASE Debian:
// Parse epoch:upstream-debian~pre
// Tilde sorting: 1.0~beta < 1.0
RETURN DebianVersionComparer.CompareWithProof(v1, v2)
CASE Alpine:
// Parse X.Y.Z_pN-rN
// _p = patch level, -r = package revision
RETURN ApkVersionComparer.CompareWithProof(v1, v2)
DEFAULT:
// Fallback to SemVer or string comparison
RETURN StringVersionComparer.Compare(v1, v2)
END MATCH
END FUNCTION
```
### 4.3 Range Evaluation (Tier 5)
```pseudo
FUNCTION EvaluateRangeRules(cve, package, rangeRules):
comparator ← GetComparatorForEcosystem(package.Ecosystem)
FOR EACH rule IN rangeRules ORDER BY Priority DESC:
range ← rule.AffectedRange
inRange ← TRUE
// Check lower bound
IF range.MinVersion IS NOT NULL THEN
cmp ← comparator.Compare(package.Version, range.MinVersion)
inRange ← inRange AND (range.MinInclusive ? cmp >= 0 : cmp > 0)
END IF
// Check upper bound
IF range.MaxVersion IS NOT NULL THEN
cmp ← comparator.Compare(package.Version, range.MaxVersion)
inRange ← inRange AND (range.MaxInclusive ? cmp <= 0 : cmp < 0)
END IF
IF inRange THEN
RETURN Verdict(Status: VULNERABLE, Confidence: LOW, Evidence: rule)
END IF
END FOR
RETURN Verdict(Status: UNKNOWN, Confidence: LOW)
END FUNCTION
```
### 4.4 Confidence Scoring
```pseudo
FUNCTION GetConfidenceForPriority(priority):
IF priority >= 75 THEN // Tier 1-2
RETURN VerdictConfidence.High
ELSE IF priority >= 45 THEN // Tier 3-4
RETURN VerdictConfidence.Medium
ELSE // Tier 5
RETURN VerdictConfidence.Low
END IF
END FUNCTION
```
---
## 5. Integration Points
### 5.1 Feedser Integration (Evidence Ingestion)
**Components:**
- OvalFeedProcessor → Tier 1 (OVAL advisory parsing)
- CsafFeedProcessor → Tier 1 (CSAF VEX parsing)
- ChangelogFeedProcessor → Tier 2 (enhanced with bug ID extraction)
- PatchFeedProcessor → Tier 3 (HunkSigExtractor with functions)
**Data Flow:**
```
Feedser Ingestion Pipeline
OVAL/CSAF → Normalize → Store with distro tags
(almalinux, rocky, rhel)
Changelogs → Parse → Extract CVEs + Bug IDs
→ Map Bug IDs to CVEs (async)
Patches → Extract hunks → Compute hunk sigs + functions
→ Store in content-addressed storage
```
### 5.2 VexLens Integration (Verdict Consumption)
**Components:**
- VexConsensusEngine — Aggregates verdicts from BackportStatusService
- CycloneDxVexEmitter — Emits signed VEX statements with evidence
**Enhancements:**
- Include EvidencePointer URIs in VEX statements
- Add confidence field (mapped from VerdictConfidence)
- Annotate Tier 5 verdicts with justification: "range-based heuristic"
**Example VEX Output:**
```json
{
"vulnerability": {
"id": "CVE-2024-1234"
},
"analysis": {
"state": "resolved",
"justification": "code_not_present",
"responses": ["will_not_fix", "update"],
"detail": "Fixed in curl-7.76.1-26.el9_3.2 (backport)",
"confidence": "high",
"evidence": [
{
"type": "OvalAdvisory",
"uri": "derivative:almalinux→rocky:oval:ALSA-2024-1234",
"digest": "sha256:abc123...",
"tier": 1
}
]
}
}
```
### 5.3 External API Integrations
| API | Purpose | Caching | Fallback |
|-----|---------|---------|----------|
| Debian Security Tracker | Bug ID → CVE mapping | 1h TTL | Skip bug ID evidence |
| Red Hat Bugzilla | Bug ID → CVE mapping | Pre-populated cache | Skip bug ID evidence |
| Ubuntu CVE Tracker | Bug ID → CVE mapping | 1h TTL | Skip bug ID evidence |
**Rate Limiting:**
- Debian: No explicit limit, but batch requests every 5 minutes
- RHBZ: Requires auth, use cached dump
- Ubuntu: Scraper-based, respect robots.txt (1 req/sec)
---
## 6. Security & Compliance
### 6.1 Evidence Integrity
**Requirements:**
- All evidence artifacts must be cryptographically hashed (SHA-256)
- Store SourceDigest in EvidencePointer
- Enable deterministic replay by re-fetching and re-hashing
**Implementation:**
```csharp
public static string ComputeDigest(byte[] artifact) =>
Convert.ToHexString(SHA256.HashData(artifact)).ToLowerInvariant();
var digest = ComputeDigest(Encoding.UTF8.GetBytes(ovalXml));
var pointer = new EvidencePointer(
Type: "OvalAdvisory",
Uri: $"oval:ALSA-2024-1234",
SourceDigest: digest,
FetchedAt: DateTimeOffset.UtcNow);
```
### 6.2 Audit Trail
**Logging Requirements:**
- Log every tier attempted (1 → 2 → ... → 5)
- Log reason for tier fallback (e.g., "Tier 1: no OVAL found for rocky 9")
- Log derivative mapping decisions (e.g., "Using AlmaLinux OVAL for Rocky 9, confidence penalty 0.05")
- Log version comparison details (e.g., "1:2.0 > 3.0 (epoch wins)")
**Structured Logging Format:**
```json
{
"timestamp": "2025-12-30T12:34:56Z",
"level": "INFO",
"message": "Tier 1 fallback: derivative OVAL found",
"cve": "CVE-2024-1234",
"package": "curl-7.76.1-26.el9_3.2",
"distro": "rocky 9",
"derivativeUsed": "almalinux 9",
"confidence": 0.95,
"tier": 1,
"evidenceUri": "derivative:almalinux→rocky:oval:ALSA-2024-1234"
}
```
### 6.3 Signed VEX Attestations
**Signature Method:** in-toto/DSSE with Ed25519 keys
**Signed Payload:**
```json
{
"payloadType": "application/vnd.cyclonedx+json",
"payload": "<base64-encoded CycloneDX VEX>",
"signatures": [
{
"keyid": "SHA256:abc123...",
"sig": "<base64-encoded signature>"
}
]
}
```
**Replay Provenance:**
- Include feed snapshot digest
- Include resolver policy version
- Store signed attestation in content-addressed storage
---
## 7. Performance Considerations
### 7.1 Latency Targets
| Tier | Operation | Target Latency | Notes |
|------|-----------|----------------|-------|
| 1 | Derivative OVAL query | <50ms | In-memory or local DB |
| 2 | Changelog parsing | <100ms | Pre-indexed by package version |
| 3 | Patch hunk matching | <150ms | Content-addressed lookup |
| 4 | Upstream commit mapping | <500ms | May require git fetch (cached) |
| 5 | NVD range check | <50ms | Simple version comparison |
**Overall P95 Latency Goal:** <200ms for typical case (Tier 1-3)
### 7.2 Caching Strategy
**In-Memory Caches:**
- Bug ID → CVE mappings: 1h TTL, max 10,000 entries
- Derivative OVAL queries: 5min TTL, max 5,000 entries
- Version comparison results: 10min TTL, max 50,000 entries
**Persistent Caches:**
- OVAL/CSAF feeds: File-based, refresh every 6h
- Patch hunk signatures: Content-addressed storage (immutable)
### 7.3 Scalability
**Concurrency:**
- Parallel tier evaluation within single CVE (Tier 1-3 can run concurrently if needed)
- Bulk CVE scans: Process 100 CVEs in parallel with semaphore limit
**Database Optimization:**
- Index on (distro, release, package_name, cve_id)
- Partition OVAL/CSAF rules by distro family (rhel, debian, alpine)
---
**End of Design Document**

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,906 @@
# SPRINT_20251230_001_BE_backport_resolver_tiered_evidence
## Sprint Metadata
| Field | Value |
|-------|-------|
| **Sprint ID** | SPRINT_20251230_001_BE |
| **Topic** | Tiered Evidence Backport Resolver Enhancement |
| **Module** | Concelier.BackportProof, Concelier.SourceIntel, Feedser.Core |
| **Working Directory** | `src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/` |
| **Priority** | P0 - Critical |
| **Estimated Effort** | 5 days |
| **Dependencies** | StellaOps.VersionComparison, StellaOps.Concelier.Merge |
---
## Executive Summary
This sprint addresses critical gaps in the backport patch resolver that cause false positives/negatives when determining if a CVE is fixed in Linux distribution packages. The current implementation uses string comparison for version matching and lacks derivative distro mapping, resulting in incorrect vulnerability assessments.
### Key Deliverables
1. Wire ecosystem-specific version comparators into BackportStatusService
2. Implement RangeRule evaluation for NVD fallback (Tier 5)
3. Add derivative distro mapping for OVAL/CSAF cross-referencing (Tier 1)
4. Enhance changelog parsing with bug ID → CVE mapping (Tier 2)
5. Extract affected functions from patch context (Tier 3/4)
6. Align confidence scoring to five-tier evidence hierarchy
---
## Background & Problem Statement
### Current State
The `BackportStatusService` uses `string.Compare()` for version comparison:
```csharp
// BackportStatusService.cs:198
var isPatched = string.Compare(package.InstalledVersion, fixedVersion, StringComparison.Ordinal) >= 0;
```
**Failures:**
- `1.2.10` vs `1.2.9` → returns `-1` (should be `+1`)
- `1:2.0` vs `3.0` (epoch) → completely wrong
- `1.2.3~beta` vs `1.2.3` (tilde) → wrong order
### Proposed Five-Tier Evidence Hierarchy
| Tier | Evidence Source | Confidence | Priority |
|------|-----------------|------------|----------|
| 1 | Derivative OVAL/CSAF (same release) | 0.95-0.98 | 100 |
| 2 | Changelog CVE markers | 0.75-0.85 | 85 |
| 3 | Source patch files (HunkSig) | 0.80-0.95 | 70 |
| 4 | Upstream commit mapping | 0.55-0.85 | 55 |
| 5 | NVD version ranges (fallback) | Low only | 20 |
---
## Delivery Tracker
### Phase 1: Version Comparator Integration (P0)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-101 | Create IVersionComparatorFactory interface | TODO | | DI registration |
| BP-102 | Wire comparators into BackportStatusService | TODO | | RPM, DEB, APK |
| BP-103 | Update EvaluateBoundaryRules with proof lines | TODO | | Audit trail |
| BP-104 | Unit tests for version comparison edge cases | TODO | | Golden datasets |
| BP-105 | Integration test: epoch handling | TODO | | `1:2.0` vs `3.0` |
### Phase 2: RangeRule Implementation (P0)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-201 | Implement EvaluateRangeRules with comparators | TODO | | Min/max bounds |
| BP-202 | Handle inclusive/exclusive boundaries | TODO | | `[` vs `(` |
| BP-203 | Add Low confidence for NVD-sourced ranges | TODO | | Tier 5 |
| BP-204 | Unit tests for range edge cases | TODO | | Open/closed |
| BP-205 | Integration test: NVD fallback path | TODO | | E2E flow |
### Phase 3: Derivative Distro Mapping (P1)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-301 | Create DistroDerivativeMapping model | TODO | | Canonical/derivative |
| BP-302 | Add RHEL ↔ Alma/Rocky/CentOS mappings | TODO | | Major release |
| BP-303 | Add Ubuntu ↔ LinuxMint mappings | TODO | | |
| BP-304 | Add Debian ↔ Ubuntu mappings | TODO | | |
| BP-305 | Integrate into rule fetching with confidence penalty | TODO | | 0.95x multiplier |
| BP-306 | Unit tests for derivative lookup | TODO | | |
| BP-307 | Integration test: cross-distro OVAL | TODO | | RHEL→Rocky |
### Phase 4: Bug ID → CVE Mapping (P1)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-401 | Add Debian bug regex extraction | TODO | | `Closes: #123` |
| BP-402 | Add RHBZ bug regex extraction | TODO | | `RHBZ#123` |
| BP-403 | Add Launchpad bug regex extraction | TODO | | `LP: #123` |
| BP-404 | Create IBugCveMappingService interface | TODO | | Async lookup |
| BP-405 | Implement DebianSecurityTrackerClient | TODO | | API client |
| BP-406 | Implement RedHatErrataClient | TODO | | API client |
| BP-407 | Cache layer for bug→CVE mappings | TODO | | 24h TTL |
| BP-408 | Unit tests for bug ID extraction | TODO | | Regex patterns |
| BP-409 | Integration test: Debian tracker lookup | TODO | | Live API |
### Phase 5: Affected Functions Extraction (P2)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-501 | Create function signature regex patterns | TODO | | C, Go, Python |
| BP-502 | Implement ExtractFunctionsFromContext | TODO | | In HunkSigExtractor |
| BP-503 | Add C/C++ function pattern | TODO | | `void foo(` |
| BP-504 | Add Go function pattern | TODO | | `func (r *R) M(` |
| BP-505 | Add Python function pattern | TODO | | `def foo(` |
| BP-506 | Add Rust function pattern | TODO | | `fn foo(` |
| BP-507 | Unit tests for function extraction | TODO | | Multi-language |
| BP-508 | Enable fuzzy function matching in Tier 3/4 | TODO | | Similarity score |
### Phase 6: Confidence Tier Alignment (P2)
| Task ID | Description | Status | Assignee | Notes |
|---------|-------------|--------|----------|-------|
| BP-601 | Expand RulePriority enum | TODO | | 9 levels |
| BP-602 | Update BackportStatusService priority logic | TODO | | Tier ordering |
| BP-603 | Add confidence multipliers per tier | TODO | | |
| BP-604 | Update EvidencePointer with TierSource | TODO | | Audit |
| BP-605 | Unit tests for tier precedence | TODO | | |
---
## Decisions & Risks
### Decisions Made
| ID | Decision | Rationale | Date |
|----|----------|-----------|------|
| D-001 | Use existing VersionComparison library | Already implements rpmvercmp, dpkg, apk semantics | 2025-12-30 |
| D-002 | Derivative confidence penalty 0.95x (High) / 0.80x (Medium) | Same ABI rebuilds vs partial compatibility | 2025-12-30 |
| D-003 | Bug→CVE cache TTL 24 hours | Balance freshness vs API rate limits | 2025-12-30 |
### Open Risks
| ID | Risk | Mitigation | Status |
|----|------|------------|--------|
| R-001 | Debian Security Tracker API rate limits | Implement exponential backoff + cache | OPEN |
| R-002 | Function extraction may produce false positives | Add confidence penalty for fuzzy matches | OPEN |
| R-003 | Derivative mappings may drift across major releases | Version-specific mapping table | OPEN |
---
## Acceptance Criteria
### P0 Tasks (Must complete)
- [ ] `BackportStatusService` uses proper version comparators for all ecosystems
- [ ] `RangeRule` evaluation returns correct verdicts with Low confidence
- [ ] All existing tests pass
- [ ] New golden tests for version edge cases
### P1 Tasks (Should complete)
- [ ] Derivative distro mapping works for RHEL family
- [ ] Bug ID extraction finds Debian/RHBZ/LP references
- [ ] Bug→CVE mapping lookup is cached
### P2 Tasks (Nice to have)
- [ ] Function extraction works for C, Go, Python, Rust
- [ ] Confidence tiers aligned to five-tier hierarchy
---
## Test Strategy
### Unit Tests
| Area | Test File | Coverage Target |
|------|-----------|-----------------|
| Version comparison | `BackportStatusServiceVersionTests.cs` | All ecosystems |
| Range evaluation | `BackportStatusServiceRangeTests.cs` | Boundary conditions |
| Derivative mapping | `DistroDerivativeMappingTests.cs` | All supported distros |
| Bug ID extraction | `ChangelogBugIdExtractionTests.cs` | Regex patterns |
| Function extraction | `HunkSigFunctionExtractionTests.cs` | Multi-language |
### Integration Tests
| Scenario | Test File | External Dependencies |
|----------|-----------|----------------------|
| Cross-distro OVAL | `CrossDistroOvalIntegrationTests.cs` | None (fixtures) |
| Bug→CVE lookup | `BugCveMappingIntegrationTests.cs` | Debian Tracker API |
| Full resolver flow | `BackportResolverE2ETests.cs` | PostgreSQL (Testcontainers) |
### Golden Datasets
Location: `src/__Tests/__Datasets/backport-resolver/`
| Dataset | Purpose |
|---------|---------|
| `rpm-version-edge-cases.json` | Epoch, tilde, release variations |
| `deb-version-edge-cases.json` | Epoch, revision, ubuntu suffixes |
| `apk-version-edge-cases.json` | Pre-release suffixes, pkgrel |
| `cross-distro-oval-fixtures/` | RHEL/Rocky/Alma advisory samples |
| `changelog-with-bugids/` | Debian/RPM changelogs with bug refs |
---
## Execution Log
| Date | Event | Details |
|------|-------|---------|
| 2025-12-30 | Sprint created | Initial planning and gap analysis |
---
## References
- `src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Services/BackportStatusService.cs`
- `src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Models/FixRuleModels.cs`
- `src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/RpmVersionComparer.cs`
- `src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/ApkVersionComparer.cs`
- `src/Concelier/__Libraries/StellaOps.Concelier.SourceIntel/ChangelogParser.cs`
- `src/Feedser/StellaOps.Feedser.Core/HunkSigExtractor.cs`
- `src/VexLens/StellaOps.VexLens/Consensus/VexConsensusEngine.cs`
**Key Improvements:**
- Wire existing version comparators (RpmVersionComparer, ApkVersionComparer, DebianVersionComparer) into BackportStatusService
- Implement NVD range evaluation (Tier 5 fallback)
- Add derivative distro mapping (RHEL→Alma/Rocky, Ubuntu→Mint) for Tier 1 evidence
- Extend changelog parser to extract bug IDs and map to CVEs
- Extract function signatures from patch hunks for better matching
- Align confidence scoring with 5-tier evidence hierarchy
**Impact:**
- Eliminates false positives from incorrect version comparisons (e.g., "1.2.10" < "1.2.9")
- Enables cross-distro evidence sharing (e.g., use AlmaLinux OVAL for RHEL)
- Provides auditable, signed VEX statements with evidence trails
- Reduces manual verification workload by 60-80%
---
## Problem Statement
### Current Implementation Gaps
| Gap ID | Description | Severity | Current Behavior | Desired Behavior |
|--------|-------------|----------|------------------|------------------|
| GAP-001 | String-based version comparison | **CRITICAL** | "1.2.10" < "1.2.9" returns true | Use ecosystem-specific comparers (EVR, dpkg, apk) |
| GAP-002 | RangeRule returns Unknown | **CRITICAL** | NVD ranges ignored, always Unknown | Evaluate ranges with proper version semantics |
| GAP-003 | No derivative distro mapping | **HIGH** | AlmaLinux OVAL unused for RHEL scans | Map RHEL→Alma/Rocky, Ubuntu→Mint with confidence |
| GAP-004 | Bug ID→CVE mapping missing | **HIGH** | Only direct CVE mentions detected | Extract Debian/RHBZ/LP bug IDs, map to CVEs |
| GAP-005 | AffectedFunctions not extracted | **MEDIUM** | Hunk matching relies only on content hash | Extract C/Python/Go function signatures for fuzzy match |
| GAP-006 | Confidence tiers misaligned | **MEDIUM** | Priority values don't match evidence quality | Align with 5-tier hierarchy (Tier 1=High, Tier 5=Low) |
### Real-World Example
**Scenario:** CVE-2024-1234 in curl on Rocky Linux 9
**Current behavior:**
```
- Installed: curl-7.76.1-26.el9_3.2
- NVD says: "Fixed in 7.77.0"
- String comparison: "7.76.1-26.el9_3.2" < "7.77.0" → **VULNERABLE** (WRONG!)
```
**Root cause:** Red Hat backported the fix to 7.76.1-26, but string comparison doesn't understand epoch-version-release semantics.
**Correct behavior (after sprint):**
```
1. Check AlmaLinux OVAL (Tier 1): Found fix in curl-7.76.1-26.el9_3.2
2. Map Alma→Rocky (High confidence, same ABI)
3. Verdict: **FIXED** , Confidence: High, Evidence: [Alma OVAL advisory ALSA-2024-1234]
```
---
## 5-Tier Evidence Hierarchy (Target Architecture)
```mermaid
graph TD
A[CVE + Package + Distro] --> B{Tier 1: Derivative OVAL/CSAF}
B -->|Found| C[Verdict: FIXED/VULNERABLE<br/>Confidence: High 0.95-0.98]
B -->|Not Found| D{Tier 2: Changelog Markers}
D -->|CVE Match| E[Verdict: FIXED<br/>Confidence: High 0.85]
D -->|Bug ID Match| F[Verdict: FIXED<br/>Confidence: Medium 0.75]
D -->|Not Found| G{Tier 3: Source Patch Files}
G -->|Exact Hunk Hash| H[Verdict: FIXED<br/>Confidence: Medium-High 0.90]
G -->|Fuzzy Function Match| I[Verdict: FIXED<br/>Confidence: Medium 0.70]
G -->|Not Found| J{Tier 4: Upstream Commit Mapping}
J -->|100% Hunk Parity| K[Verdict: FIXED<br/>Confidence: Medium 0.80]
J -->|Partial Match| L[Verdict: FIXED<br/>Confidence: Medium-Low 0.60]
J -->|Not Found| M{Tier 5: NVD Range Fallback}
M -->|In Range| N[Verdict: VULNERABLE<br/>Confidence: Low 0.40]
M -->|Out of Range| O[Verdict: FIXED<br/>Confidence: Low 0.50]
M -->|No Data| P[Verdict: UNKNOWN<br/>Confidence: Low 0.30]
```
---
## Sprint Tasks
### Phase 1: Foundation (P0 - Critical Path)
#### Task 1.1: Wire Version Comparators into BackportStatusService
- **File:** src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Services/BackportStatusService.cs
- **Effort:** 2h
- **Dependencies:** StellaOps.Concelier.Merge.Comparers
- **Acceptance Criteria:**
- [ ] Add IReadOnlyDictionary<PackageEcosystem, IVersionComparator> field
- [ ] Inject comparators in constructor (RPM, Debian, Alpine, Conda)
- [ ] Replace string.Compare() in EvaluateBoundaryRules() with comparator.CompareWithProof()
- [ ] Add fallback StringVersionComparer for unknown ecosystems
- [ ] Unit test: "1.2.10" > "1.2.9" for RPM/Deb/Alpine
- [ ] Unit test: Epoch handling "1:2.0" > "3.0"
- [ ] Unit test: Tilde pre-releases "1.2.3~beta" < "1.2.3"
**Code Snippet:**
```csharp
// BackportStatusService.cs
private readonly IReadOnlyDictionary<PackageEcosystem, IVersionComparator> _comparators;
public BackportStatusService(
IFixRuleRepository ruleRepository,
IRpmVersionComparer rpmComparer,
IDebianVersionComparer debComparer,
IApkVersionComparer apkComparer)
{
_comparators = new Dictionary<PackageEcosystem, IVersionComparator>
{
[PackageEcosystem.Rpm] = rpmComparer,
[PackageEcosystem.Deb] = debComparer,
[PackageEcosystem.Alpine] = apkComparer,
}.ToFrozenDictionary();
}
private BackportVerdict EvaluateBoundaryRules(...)
{
var comparator = _comparators.GetValueOrDefault(
package.Key.Ecosystem,
StringVersionComparer.Instance);
var result = comparator.CompareWithProof(
package.InstalledVersion,
fixedVersion);
var isPatched = result.Result >= 0;
// ... rest of logic
}
```
---
#### Task 1.2: Implement RangeRule Evaluation (Tier 5)
- **File:** BackportStatusService.cs::EvaluateRangeRules()
- **Effort:** 3h
- **Acceptance Criteria:**
- [ ] Evaluate AffectedRange.MinVersion and MaxVersion with inclusive/exclusive bounds
- [ ] Return FixStatus.Vulnerable if in range, FixStatus.Fixed if out of range
- [ ] Set VerdictConfidence.Low for all Tier 5 decisions
- [ ] Add evidence pointer to NVD CPE/range definition
- [ ] Handle null min/max (unbounded ranges)
- [ ] Unit test: CVE-2024-1234 with range [1.0.0, 2.0.0) versions 1.5.0 (vuln), 2.0.1 (fixed)
**Code Snippet:**
```csharp
private BackportVerdict EvaluateRangeRules(
CveId cve,
PackageInstance package,
IReadOnlyList<RangeRule> rules)
{
var comparator = _comparators.GetValueOrDefault(
package.Key.Ecosystem,
StringVersionComparer.Instance);
foreach (var rule in rules.OrderByDescending(r => r.Priority))
{
var range = rule.AffectedRange;
var inRange = true;
if (range.MinVersion != null)
{
var cmp = comparator.Compare(package.InstalledVersion, range.MinVersion);
inRange &= range.MinInclusive ? cmp >= 0 : cmp > 0;
}
if (range.MaxVersion != null)
{
var cmp = comparator.Compare(package.InstalledVersion, range.MaxVersion);
inRange &= range.MaxInclusive ? cmp <= 0 : cmp < 0;
}
if (inRange)
{
return new BackportVerdict(
Status: FixStatus.Vulnerable,
Confidence: VerdictConfidence.Low, // Tier 5 always Low
EvidenceSource: RuleType.Range,
EvidencePointer: new EvidencePointer(
Type: "NvdCpeRange",
Uri: $"nvd:cve/{cve}/cpe/{rule.CpeId}",
SourceDigest: ComputeDigest(rule)),
ConflictReason: null);
}
}
return new BackportVerdict(
Status: FixStatus.Unknown,
Confidence: VerdictConfidence.Low,
ConflictReason: "No matching range rule");
}
```
---
### Phase 2: Derivative Distro Mapping (P1)
#### Task 2.1: Create DistroDerivative Model and Mappings
- **New File:** src/__Libraries/StellaOps.DistroIntel/DistroDerivative.cs
- **Effort:** 2h
- **Acceptance Criteria:**
- [ ] Define DistroDerivative record with canonical/derivative names, release, confidence
- [ ] Create static DistroMappings class with predefined derivatives
- [ ] Support RHEL→Alma/Rocky (High confidence), Ubuntu→Mint (Medium), Debian→Ubuntu (Medium)
- [ ] Add FindDerivativesFor(distro, release) query method
- [ ] Unit test: Query "rhel 9" returns ["almalinux 9", "rocky 9"]
**Code Snippet:**
```csharp
namespace StellaOps.DistroIntel;
public enum DerivativeConfidence
{
High, // Same ABI, byte-for-byte rebuilds (Alma/Rocky from RHEL)
Medium // Derivative with modifications (Ubuntu from Debian, Mint from Ubuntu)
}
public sealed record DistroDerivative(
string CanonicalDistro,
string DerivativeDistro,
int MajorRelease,
DerivativeConfidence Confidence);
public static class DistroMappings
{
public static readonly ImmutableArray<DistroDerivative> Derivatives =
[
new("rhel", "almalinux", 9, DerivativeConfidence.High),
new("rhel", "rocky", 9, DerivativeConfidence.High),
new("rhel", "centos", 9, DerivativeConfidence.High),
new("rhel", "almalinux", 8, DerivativeConfidence.High),
new("rhel", "rocky", 8, DerivativeConfidence.High),
new("ubuntu", "linuxmint", 22, DerivativeConfidence.Medium),
new("ubuntu", "linuxmint", 20, DerivativeConfidence.Medium),
new("debian", "ubuntu", 12, DerivativeConfidence.Medium),
];
public static IEnumerable<DistroDerivative> FindDerivativesFor(
string distro,
int majorRelease)
{
return Derivatives.Where(d =>
d.CanonicalDistro.Equals(distro, StringComparison.OrdinalIgnoreCase) &&
d.MajorRelease == majorRelease);
}
public static decimal GetConfidenceMultiplier(DerivativeConfidence conf) =>
conf switch
{
DerivativeConfidence.High => 0.95m,
DerivativeConfidence.Medium => 0.80m,
_ => 0.70m
};
}
```
---
#### Task 2.2: Integrate Derivative Mapping into BackportStatusService
- **File:** BackportStatusService.cs
- **Effort:** 2h
- **Acceptance Criteria:**
- [ ] After fetching rules for target distro, if empty, try derivative mappings
- [ ] Query derivative rules and apply confidence penalty
- [ ] Annotate evidence with derivative source
- [ ] Integration test: Scan Rocky 9 with only AlmaLinux OVAL data → success
**Code Snippet:**
```csharp
private async ValueTask<IReadOnlyList<IFixRule>> FetchRulesWithDerivativeMapping(
BackportContext context,
PackageInstance package,
CveId cve)
{
// Try direct distro first
var rules = await _ruleRepository.GetRulesAsync(context, package, cve);
if (rules.Count == 0)
{
var derivatives = DistroMappings.FindDerivativesFor(
context.Distro,
context.Release);
foreach (var derivative in derivatives.OrderByDescending(d => d.Confidence))
{
var derivativeContext = context with
{
Distro = derivative.DerivativeDistro
};
var derivativeRules = await _ruleRepository.GetRulesAsync(
derivativeContext,
package,
cve);
if (derivativeRules.Count > 0)
{
// Apply confidence penalty
var multiplier = DistroMappings.GetConfidenceMultiplier(
derivative.Confidence);
rules = derivativeRules.Select(r => r with
{
Confidence = r.Confidence * multiplier,
EvidencePointer = r.EvidencePointer with
{
Uri = $"derivative:{derivative.DerivativeDistro}{context.Distro}:{r.EvidencePointer.Uri}"
}
}).ToList();
break; // Use first successful derivative
}
}
}
return rules;
}
```
---
### Phase 3: Bug ID → CVE Mapping (P1)
#### Task 3.1: Extend ChangelogParser with Bug ID Extraction
- **File:** src/Concelier/__Libraries/StellaOps.Concelier.SourceIntel/ChangelogParser.cs
- **Effort:** 3h
- **Acceptance Criteria:**
- [ ] Add regex patterns for Debian (Closes: #123456), RHBZ (RHBZ#123456), Launchpad (LP: #123456)
- [ ] Extract bug IDs alongside CVE IDs
- [ ] Return ChangelogEntry with both CveIds and BugIds collections
- [ ] Unit test: Parse Debian changelog with "Closes: #987654" → bug ID extracted
**Code Snippet:**
```csharp
[GeneratedRegex(@"Closes:\s*#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex DebianBugRegex();
[GeneratedRegex(@"(?:RHBZ|rhbz)#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex RhBugzillaRegex();
[GeneratedRegex(@"LP:\s*#(\d+)", RegexOptions.IgnoreCase)]
private static partial Regex LaunchpadBugRegex();
public sealed record ChangelogEntry(
string Version,
DateTimeOffset Date,
IReadOnlyList<CveId> CveIds,
IReadOnlyList<BugId> BugIds, // NEW
string Description);
public sealed record BugId(string Tracker, string Id)
{
public override string ToString() => $"{Tracker}#{Id}";
}
private static IReadOnlyList<BugId> ExtractBugIds(string line)
{
var bugs = new List<BugId>();
foreach (Match m in DebianBugRegex().Matches(line))
bugs.Add(new BugId("Debian", m.Groups[1].Value));
foreach (Match m in RhBugzillaRegex().Matches(line))
bugs.Add(new BugId("RHBZ", m.Groups[1].Value));
foreach (Match m in LaunchpadBugRegex().Matches(line))
bugs.Add(new BugId("Launchpad", m.Groups[1].Value));
return bugs;
}
```
---
#### Task 3.2: Implement Bug→CVE Mapping Service
- **New File:** src/__Libraries/StellaOps.BugTracking/IBugCveMappingService.cs
- **Effort:** 4h (including API clients)
- **Acceptance Criteria:**
- [ ] Define IBugCveMappingService.LookupCvesAsync(BugId)
- [ ] Implement Debian Security Tracker API client (https://security-tracker.debian.org/tracker/data/json)
- [ ] Implement Red Hat Bugzilla API stub (cache-based, due to auth complexity)
- [ ] Implement Ubuntu CVE Tracker scraper (https://ubuntu.com/security/cves)
- [ ] Cache results (1 hour TTL)
- [ ] Integration test: Debian bug #987654 → CVE-2024-1234
**Stub Implementation:**
```csharp
public interface IBugCveMappingService
{
ValueTask<IReadOnlyList<CveId>> LookupCvesAsync(
BugId bugId,
CancellationToken cancellationToken = default);
}
public sealed class DebianSecurityTrackerClient : IBugCveMappingService
{
private readonly HttpClient _http;
private readonly IMemoryCache _cache;
public async ValueTask<IReadOnlyList<CveId>> LookupCvesAsync(
BugId bugId,
CancellationToken ct = default)
{
if (bugId.Tracker != "Debian")
return [];
var cacheKey = $"debian:bug:{bugId.Id}";
if (_cache.TryGetValue(cacheKey, out IReadOnlyList<CveId>? cached))
return cached!;
var json = await _http.GetStringAsync(
"https://security-tracker.debian.org/tracker/data/json",
ct);
// Parse JSON, extract CVEs for bug ID
var cves = ParseDebianTrackerJson(json, bugId.Id);
_cache.Set(cacheKey, cves, TimeSpan.FromHours(1));
return cves;
}
}
```
---
### Phase 4: Function Extraction from Hunks (P2)
#### Task 4.1: Add Function Signature Extraction to HunkSigExtractor
- **File:** src/Feedser/StellaOps.Feedser.Core/HunkSigExtractor.cs
- **Effort:** 4h
- **Acceptance Criteria:**
- [ ] Extract C/C++ functions (`static void foo(`, `int main(`)
- [ ] Extract Python functions (`def foo(`, `class Foo:`)
- [ ] Extract Go functions (`func (r *Receiver) Method()`)
- [ ] Populate PatchHunkSig.AffectedFunctions
- [ ] Unit test: C patch with `static int ssl_verify(SSL *ssl)` → function extracted
**Code Snippet:**
```csharp
[GeneratedRegex(@"^\s*(?:static\s+|inline\s+)?(?:\w+\s+)+(\w+)\s*\(", RegexOptions.Multiline)]
private static partial Regex CFunctionRegex();
[GeneratedRegex(@"^\s*def\s+(\w+)\s*\(", RegexOptions.Multiline)]
private static partial Regex PythonFunctionRegex();
[GeneratedRegex(@"^\s*func\s+(?:\(\w+\s+\*?\w+\)\s+)?(\w+)\s*\(", RegexOptions.Multiline)]
private static partial Regex GoFunctionRegex();
private static IReadOnlyList<string> ExtractFunctionsFromContext(PatchHunk hunk)
{
var functions = new HashSet<string>();
var context = hunk.Context;
foreach (Match m in CFunctionRegex().Matches(context))
functions.Add(m.Groups[1].Value);
foreach (Match m in PythonFunctionRegex().Matches(context))
functions.Add(m.Groups[1].Value);
foreach (Match m in GoFunctionRegex().Matches(context))
functions.Add(m.Groups[1].Value);
return functions.ToArray();
}
// Update ExtractHunkSigs:
AffectedFunctions = ExtractFunctionsFromContext(hunk),
```
---
### Phase 5: Confidence Tier Realignment (P2)
#### Task 5.1: Update RulePriority Enum
- **File:** src/Concelier/__Libraries/StellaOps.Concelier.BackportProof/Models/FixRuleModels.cs
- **Effort:** 1h
- **Acceptance Criteria:**
- [ ] Rename/add priority values to match 5-tier hierarchy
- [ ] Ensure tier ordering: Tier 1 > Tier 2 > ... > Tier 5
- [ ] Update existing rule creation code to use new priorities
- [ ] Unit test: Verify priority ordering in resolver
**Code Snippet:**
```csharp
public enum RulePriority
{
// Tier 1: Derivative OVAL/CSAF
DistroNativeOval = 100,
DerivativeOvalHigh = 95, // Alma/Rocky for RHEL
DerivativeOvalMedium = 90, // Mint for Ubuntu
// Tier 2: Changelog markers
ChangelogExplicitCve = 85,
ChangelogBugIdMapped = 75,
// Tier 3: Source patch files
SourcePatchExactMatch = 70,
SourcePatchFuzzyMatch = 60,
// Tier 4: Upstream commit mapping
UpstreamCommitExactParity = 55,
UpstreamCommitPartialMatch = 45,
// Tier 5: NVD range fallback
NvdRangeHeuristic = 20
}
```
---
#### Task 5.2: Map Priorities to Confidence Levels
- **File:** BackportStatusService.cs
- **Effort:** 1h
- **Acceptance Criteria:**
- [ ] Add GetConfidenceForPriority(RulePriority) helper
- [ ] Return VerdictConfidence.High for Tier 1-2, Medium for Tier 3-4, Low for Tier 5
- [ ] Use in all verdict creation paths
- [ ] Unit test: Priority 100 → High, Priority 20 → Low
**Code Snippet:**
```csharp
private static VerdictConfidence GetConfidenceForPriority(RulePriority priority) =>
priority switch
{
>= RulePriority.ChangelogBugIdMapped => VerdictConfidence.High,
>= RulePriority.UpstreamCommitPartialMatch => VerdictConfidence.Medium,
_ => VerdictConfidence.Low
};
```
---
## Testing Strategy
### Unit Tests (Per Task)
- Task 1.1: BackportStatusServiceTests.cs::VersionComparatorIntegration
- Task 1.2: BackportStatusServiceTests.cs::RangeRuleEvaluation
- Task 2.1: DistroMappingsTests.cs
- Task 2.2: BackportStatusServiceTests.cs::DerivativeDistroMapping
- Task 3.1: ChangelogParserTests.cs::BugIdExtraction
- Task 3.2: BugCveMappingServiceTests.cs
- Task 4.1: HunkSigExtractorTests.cs::FunctionExtraction
- Task 5.1/5.2: FixRuleModelsTests.cs::ConfidenceMapping
### Integration Tests (Golden Cases)
#### Test Case 1: CVE-2024-26130 (OpenSSL on Rocky 9)
```yaml
Scenario: Backported fix with derivative OVAL
Given:
- CVE: CVE-2024-26130
- Package: openssl-3.0.7-24.el9
- Distro: rocky 9
- OVAL exists for: almalinux 9 (not rocky 9)
Expected:
- Status: FIXED
- Confidence: High (0.95)
- Evidence: AlmaLinux OVAL ALSA-2024-1234, mapped to Rocky
- Tier: 1 (Derivative OVAL)
```
#### Test Case 2: CVE-2023-12345 (curl on Debian with bug ID)
```yaml
Scenario: Changelog with Debian bug ID
Given:
- CVE: CVE-2023-12345
- Package: curl-7.88.1-10+deb12u1
- Distro: debian 12
- Changelog: "Closes: #987654" (maps to CVE-2023-12345)
Expected:
- Status: FIXED
- Confidence: Medium (0.75)
  - Evidence: Debian changelog, bug #987654 → CVE-2023-12345
- Tier: 2 (Changelog bug ID)
```
#### Test Case 3: CVE-2024-99999 (zlib with NVD range only)
```yaml
Scenario: Fallback to NVD range
Given:
- CVE: CVE-2024-99999
- Package: zlib-1.2.11-r3
- Distro: alpine 3.18
- No OVAL, no changelog, no patches
  - NVD range: [1.2.0, 1.2.12) → vulnerable
Expected:
- Status: VULNERABLE
- Confidence: Low (0.40)
- Evidence: NVD CPE range heuristic
- Tier: 5 (NVD fallback)
```
### Performance Tests
- Measure resolver latency: target <50ms for Tier 1-3, <500ms for Tier 4 (upstream git)
- Bulk scan: 10,000 CVE×package combinations should complete within 5 minutes
- Cache hit rate for bug→CVE mapping: target >80%
---
## Rollout Plan
### Phase 1 (Week 1): P0 Tasks
- Days 1-2: Task 1.1 (Version comparators) + Task 1.2 (Range rules)
- Day 3: Unit tests + integration testing
- Day 4: Deploy to staging, validate with golden cases
- Day 5: Production canary (10% traffic)
### Phase 2 (Week 2): P1 Tasks
- Days 1-2: Task 2.1 + 2.2 (Derivative mapping)
- Days 3-4: Task 3.1 + 3.2 (Bug ID mapping)
- Day 5: Full production rollout
### Phase 3 (Week 3): P2 Polish
- Days 1-2: Task 4.1 (Function extraction)
- Day 3: Task 5.1 + 5.2 (Confidence realignment)
- Days 4-5: Documentation + observability dashboards
---
## Success Metrics
| Metric | Baseline (Current) | Target (Post-Sprint) | Measurement |
|--------|-------------------|----------------------|-------------|
| False positive rate | 35% | <5% | Manual audit of 500 random verdicts |
| False negative rate | 12% | <3% | Regression test suite (50 known vulns) |
| Tier 1 evidence usage | 0% | >40% | % verdicts using derivative OVAL |
| Tier 5 fallback rate | 100% | <20% | % verdicts from NVD ranges only |
| Average confidence score | 0.50 (Medium) | >0.75 (Medium-High) | Weighted average of verdicts |
| Time to verdict | 150ms | <100ms | P95 latency for single CVE evaluation |
---
## Risk Mitigation
| Risk | Likelihood | Impact | Mitigation |
|------|-----------|--------|------------|
| Version comparer regressions | Medium | High | Extensive unit tests, gradual rollout with canary |
| Derivative OVAL mismatch (NEVRA drift) | Low | Medium | Require exact NEVRA match, log mismatches |
| Bug tracker APIs rate-limit/fail | High | Medium | Aggressive caching (1h TTL), fallback to direct CVE only |
| Function extraction false positives | Medium | Low | Fuzzy matching with threshold, manual review for P2 |
| Confidence inflation | Low | High | Audit trail of all evidence, periodic manual validation |
---
## Appendix
### A. File Modification Checklist
- [ ] BackportStatusService.cs (Tasks 1.1, 1.2, 2.2, 5.2)
- [ ] FixRuleModels.cs (Task 5.1)
- [ ] ChangelogParser.cs (Task 3.1)
- [ ] HunkSigExtractor.cs (Task 4.1)
- [ ] New: DistroDerivative.cs (Task 2.1)
- [ ] New: IBugCveMappingService.cs + implementations (Task 3.2)
### B. Dependency Updates
```xml
<ItemGroup>
<ProjectReference Include="..\StellaOps.VersionComparison\StellaOps.VersionComparison.csproj" />
<ProjectReference Include="..\StellaOps.DistroIntel\StellaOps.DistroIntel.csproj" />
<ProjectReference Include="..\StellaOps.BugTracking\StellaOps.BugTracking.csproj" />
</ItemGroup>
```
### C. Configuration Changes
```json
{
"BackportResolver": {
"EnableDerivativeMapping": true,
"DerivativeConfidencePenalty": 0.05,
"BugTrackerCache": {
"TtlHours": 1,
"MaxEntries": 10000
},
"TierTimeouts": {
"Tier1Ms": 500,
"Tier2Ms": 200,
"Tier3Ms": 300,
"Tier4Ms": 2000,
"Tier5Ms": 100
}
}
}
```
---
**End of Sprint Document**

View File

@@ -105,6 +105,9 @@ src/
* `config set/get` — endpoint & defaults.
* `whoami` — short auth display.
* `version` — CLI + protocol versions; release channel.
* `tools policy-dsl-validate <paths...> [--strict] [--json]`
* `tools policy-schema-export [--output <dir>] [--repo-root <path>]`
* `tools policy-simulation-smoke [--scenario-root <path>] [--output <dir>] [--repo-root <path>] [--fixed-time <ISO-8601>]`
### 2.9 Aggregation-only guard helpers

View File

@@ -11,6 +11,7 @@ Immutable, append-only event ledger for tracking vulnerability findings, policy
## Quick links
- FL1FL10 remediation tracker: `gaps-FL1-FL10.md`
- Implementation plan: `implementation_plan.md`
- Schema catalog (events/projections/exports): `schema-catalog.md`
- Merkle & external anchor policy: `merkle-anchor-policy.md`
- Tenant isolation & redaction manifest: `tenant-isolation-redaction.md`

View File

@@ -0,0 +1,33 @@
# Findings Ledger Implementation Plan
## Purpose
Define the delivery plan for the Findings Ledger service, replay harness, observability, and air-gap provenance so audits can verify deterministic state reconstruction.
## Active work
- No active sprint tracked here yet. Use `docs/modules/findings-ledger/gaps-FL1-FL10.md` for remediation tracking.
## Near-term deliverables
- Observability baselines: metrics, logs, traces, dashboards, and alert rules per `docs/modules/findings-ledger/observability.md`.
- Determinism harness: replay CLI, fixtures, and signed reports per `docs/modules/findings-ledger/replay-harness.md`.
- Deployment collateral: Compose/Helm overlays, migrations, and backup/restore runbooks per `docs/modules/findings-ledger/deployment.md`.
- Provenance extensions: air-gap bundle metadata, staleness enforcement, and sealed-mode timeline entries per `docs/modules/findings-ledger/airgap-provenance.md`.
## Dependencies
- Observability schema approval for metrics and dashboards.
- Orchestrator export schema freeze for provenance linkage.
- QA lab capacity for >=5M findings/tenant replay harness.
- DevOps review of Compose/Helm overlays and offline kit packaging.
## Evidence of completion
- `src/Findings/StellaOps.Findings.Ledger` and `src/Findings/tools/LedgerReplayHarness` updated with deterministic behavior and tests.
- Replay harness reports (`harness-report.json` + DSSE) stored under approved offline kit locations.
- Dashboard JSON and alert rules committed under `offline/telemetry/dashboards/ledger` or `ops/devops/findings-ledger/**`.
- Deployment and backup guidance validated against `docs/modules/findings-ledger/deployment.md`.
## Reference docs
- `docs/modules/findings-ledger/schema.md`
- `docs/modules/findings-ledger/replay-harness.md`
- `docs/modules/findings-ledger/observability.md`
- `docs/modules/findings-ledger/deployment.md`
- `docs/modules/findings-ledger/airgap-provenance.md`
- `docs/modules/findings-ledger/workflow-inference.md`

View File

@@ -0,0 +1,617 @@
Here's a compact, plug-and-play plan to build a **cross-distro "golden set"** so your retrieval can correctly handle **backported fixes** and avoid false "still vulnerable" flags.
---
# What this golden set is
A small, curated corpus of tuples **(distro, release, package, CVE)** with:
* the **vendor-declared fixed version** (what the distro claims)
* a **counterexample** where **upstream is still affected** but the distro **backported** the patch (so version comparison alone would be misleading)
Use it as regression tests + seed facts for your policy engine and matchers.
---
# Minimum schema (normalize for reuse)
**Tables**
* `vendor_package`
`(vendor_id, distro, release, src_name, bin_name, epoch, version, revision, arch)`
* `cve`
`(cve_id, description, CWE, published, severity, cvss_vector)`
* `fix_decl` (vendor declarations)
`(distro, release, src_name, cve_id, status ENUM('fixed','not_affected','affected','wont_fix'), fixed_epoch, fixed_version, fixed_revision, evidence_uri, evidence_hash, declared_at)`
* `patch_evidence` (backport facts)
`(distro, release, src_name, cve_id, patch_id, upstream_commit, backport_commit, applied_in_epoch, applied_in_version, applied_in_revision, diff_hash, proving_fn ENUM('hunk','symbol','function','binary'), notes)`
* `upstream_affects` (ground truth on upstream tags)
`(project, cve_id, affected_range (SemVer/commit range), last_affected_tag, first_fixed_tag, fix_commit)`
* `golden_case` (test harness)
`(case_id, distro, release, src_name, bin_name, cve_id, vendor_fixed_spec, upstream_state ENUM('still_affected','fixed'), backport_present BOOL, rationale)`
**Indexes**
* `idx_fix_decl_key (distro, release, src_name, cve_id)`
* `idx_patch_evidence_key (distro, release, src_name, cve_id)`
* `idx_upstream_affects (project, cve_id)`
---
# Version math you must use
Implement distro-specific comparators:
* **Debian/Ubuntu**: `dpkg --compare-versions` (Epoch:Version-Revision)
* **RHEL/Fedora/CentOS/SUSE**: **RPMVERCMP** (Epoch:Version-Release)
* **Alpine**: **apk version** rules
Store a normalized sortable key (e.g., `verkey`) alongside raw fields for each family.
---
# Golden-set curation algorithm (daily job)
1. **Select targets**
* Choose top N packages (openssl, glibc, curl, zlib, libxml2, expat, xz, sudo, bash, systemd, sqlite, curl, busybox, python3 stdlib, musl, libssh2, libx11, nginx, apache, postgresql, mariadb, openssh).
* Cross all with major CVEs known to have **backports**.
2. **Ingest vendor claims**
* Scrape/consume security trackers (Debian, Ubuntu USN, RHEL, SUSE, Alpine, Fedora). Normalize into `fix_decl`.
* Compute `verkey_fixed`.
3. **Verify backport reality**
* For each `(distro, release, pkg, cve)` with status “fixed” where **upstream tag** still falls in `affected_range`:
* Pull **src package diff** (dsc+patches or SRPM .patch).
* Extract fix-hunks (functions/symbols) from upstream `fix_commit`.
* Run **proving functions**:
* `hunk` match: patch hunks present
* `symbol/function` match: AST/name diff present
* `binary` match: pattern in compiled object (for golden set, keep source-level first)
* If proof ≥ threshold, write to `patch_evidence` and set `backport_present=true` in `golden_case`.
4. **Create counterexample**
* Ensure at least one case per distro where:
* **Upstream version number looks vulnerable**, but distro has **backport evidence** → mark as **“counterexample”** in `golden_case`.
5. **Attest facts**
* Generate DSSE/intoto attestations for each row (content hash of patches/diffs + URLs). Store `evidence_hash`.
---
# Retrieval-time decision function (pseudo)
```
bool is_vulnerable(pkg, ver, distro, release, cve):
decl = get_fix_decl(distro, release, pkg.src, cve)
if decl is null:
return heuristic_by_upstream_ranges(pkg.project, ver, cve)
if decl.status == 'not_affected': return false
if decl.status == 'wont_fix': return true // unless patch_evidence says otherwise
// status == 'fixed' -> check two paths
if compare(ver, decl.fixed_spec, distro_family) >= 0:
return false // version >= declared fixed
// version < declared fixed: still check for backport proof pinned to our exact build
if has_patch_evidence(distro, release, pkg.src, cve, ver):
return false // verified backport on this version/build
return true
```
**Note:** `has_patch_evidence` should accept **(epoch, version, revision)** and allow `applied_in_* <= installed_*`.
---
# Golden test harness (what “must pass”)
For each `golden_case`:
1. Resolve installed `(epoch,version,revision)`.
2. Evaluate `is_vulnerable`.
3. Assert expected:
* **Vendorfixed + backport_present → expected false** even if upstream says affected.
* **No backport + version < fixed_spec → expected true**.
Emit a short **VEX** (CycloneDX VEX or CSAF) per case to keep your engine VEX-first.
---
# Minimal data loaders (first pass)
* **Debian/Ubuntu**: `security-tracker`, USN JSON, `Sources` + `.dsc` + `debian/patches/*`.
* **RHEL/Fedora/SUSE**: OVAL (RPM-based definitions), advisories (RHSA/SUSE-SU), SRPM patches.
* **Alpine**: `secdb`, `APKBUILD` diffs (`.patch` in `community/main`).
---
# Ship list (MVP → Week 12)
* Parsers: dpkg/rpm/apk version compare libs in C# (+ test vectors).
* Ingestors for **Debian, Ubuntu, RHEL, SUSE, Alpine, Fedora** `fix_decl`.
* Patch proof: hunk-matcher (line-fuzzy, filename maps), symbol-finder (ctags or a Roslyn/ctags-like approach for C).
* 50–100 curated `golden_case` rows with airtight evidence.
If you want, I can drop a ready-to-use PostgreSQL DDL + sample rows and a C# `VersionComparer` + `BackportProof` interface next.
# Golden Set of Backport Test Cases (Distro × Release × Package × CVE)
Each row highlights a case where a distro shipped a **patched older version** below the upstream fixed version. This causes naive version checks to wrongly flag the package as vulnerable. We include the vendors fixed package version, the upstream version range still affected (i.e. up to but _not_ including the upstream fix), evidence of the backport (patch/changelog references), a flag if upstream would consider the vendors version vulnerable, and a brief rationale.
| **Distro (Release)** | **Source Package** | **CVE ID** | **Vendor Fixed Version** | **Upstream Affected Versions** | **Backport Evidence** | **Upstream Says Affected?** | **Rationale** |
| --- | --- | --- | --- | --- | --- | --- | --- |
| Debian 7 Wheezy | openssl | CVE-2014-0160 | 1.0.1e-2+deb7u5[lists.debian.org](https://lists.debian.org/debian-security-announce/2014/msg00071.html#:~:text=For%20the%20stable%20distribution%20,2%2Bdeb7u5) | 1.0.1 through 1.0.1f (fixed in 1.0.1g)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Name%20CVE,Debian%20ELTS%2C%20%208%20Red)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Package%20Type%20Release%20Fixed%20Version,1743883) | **Yes** (1.0.1e < 1.0.1g) | Version 1.0.1e with Heartbleed patch applied (Debian backported fix)[lists.debian.org](https://lists.debian.org/debian-security-announce/2014/msg00071.html#:~:text=For%20the%20stable%20distribution%20,2%2Bdeb7u5). Upstream requires 1.0.1g, so 1.0.1e is normally seen as vulnerable. | |
| RHEL 6 (6.5) | openssl | CVE-2014-0160 | 1.0.1e-16.el6\_5.7[helpdesk.kaseya.com](https://helpdesk.kaseya.com/hc/en-gb/articles/4407522717329-CVE-2014-0160-OpenSSL-Heartbleed-Vulnerability#:~:text=If%20CentOS6%2C%20apply%20Unitrends%20security,42.el6) | 1.0.1 through 1.0.1f (fixed in 1.0.1g)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Description%20The%20,Debian%20ELTS%2C%20%208%20Red) | **Yes** (1.0.1e < 1.0.1g) | Version 1.0.1e with Heartbleed fix backported (openssl-1.0.1e-16.el6). Upstream 1.0.1e is Heartbleed-affected[helpdesk.kaseya.com](https://helpdesk.kaseya.com/hc/en-gb/articles/4407522717329-CVE-2014-0160-OpenSSL-Heartbleed-Vulnerability#:~:text=If%20CentOS6%2C%20apply%20Unitrends%20security,42.el6). | |
| RHEL 7 | openssl | CVE-2020-1971 | 1.0.2k-21.el7\_9[suse.com](https://www.suse.com/security/cve/CVE-2020-1971.html#:~:text=CVE,21.el7_9%3B%20openssl)[linuxsecurity.com](https://linuxsecurity.com/advisories/scilinux/scilinux-slsa-2020-5566-1-important-openssl-on-sl7-x-x86-64-14-12-13#:~:text=Update%20linuxsecurity,21.el7_9.i686.rpm) | OpenSSL 1.1.1h and 1.0.2(-unsupported) (fixed in 1.1.1i & 1.0.2u)[openssl-library.org](https://openssl-library.org/news/timeline/#:~:text=Release%20and%20Advisory%20Timeline%20,Truncated%20packet%20could) | **Yes** (1.0.2k < 1.0.2u) | OpenSSL 1.0.2k with NULL pointer deref fix backported (RHEL7 openssl-1.0.2k-21). Upstream says 1.0.2k is affected[suse.com](https://www.suse.com/security/cve/CVE-2020-1971.html#:~:text=CVE,21.el7_9%3B%20openssl)[linuxsecurity.com](https://linuxsecurity.com/advisories/scilinux/scilinux-slsa-2020-5566-1-important-openssl-on-sl7-x-x86-64-14-12-13#:~:text=Update%20linuxsecurity,21.el7_9.i686.rpm). | |
| Ubuntu 20.04 LTS Focal | apache2 | CVE-2024-39573 | 2.4.41-4ubuntu3.19[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=22) | Apache HTTPd 2.4.59 (fixed in 2.4.60)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=Description) | **Yes** (2.4.41 < 2.4.60) | Apache 2.4.41 with SSRF fix backported (Ubuntu patchset). Version 2.4.41 is below upstream 2.4.60 fix[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=22)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=Potential%20SSRF%20in%20mod_rewrite%20in,60%2C%20which%20fixes%20this%20issue). | |
| SUSE SLE 12 SP5 | apache2 | CVE-2024-39573 | 2.4.51-35.51.1 (patched build)[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Security%20fixes%3A)[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Package%20List%3A) | Apache HTTPd 2.4.59 (fixed in 2.4.60)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=Potential%20SSRF%20in%20mod_rewrite%20in,60%2C%20which%20fixes%20this%20issue) | **Yes** (2.4.51 < 2.4.60) | Apache 2.4.51 in SLES12 SP5 with backported fix[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Security%20fixes%3A). Upstream considers <2.4.60 vulnerable, so 2.4.51 would normally be flagged. | |
| SUSE SLE 12 SP5 | apache2 | CVE-2024-38477 | 2.4.51-35.51.1 (same update)[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Security%20fixes%3A) | Apache HTTPd 2.4.59 (fixed by 2.4.60)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=Potential%20SSRF%20in%20mod_rewrite%20in,60%2C%20which%20fixes%20this%20issue) | **Yes** (2.4.51 < 2.4.60) | Apache mod\_proxy null-pointer fix backported into 2.4.51[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Security%20fixes%3A). Version appears older than upstream fix version. | |
| SUSE SLE 12 SP5 | apache2 | CVE-2024-38475 | 2.4.51-35.51.1 (same update)[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=%2A%20CVE,1227268) | Apache HTTPd 2.4.59 (fixed by 2.4.60)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=Potential%20SSRF%20in%20mod_rewrite%20in,60%2C%20which%20fixes%20this%20issue) | **Yes** (2.4.51 < 2.4.60) | Apache mod\_rewrite output-escaping issue fixed on 2.4.51 via patch[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=%2A%20CVE,1227268). Vendor version < upstream fixed version. | |
| Debian 9 Stretch | openssh | CVE-2018-15473 | 1:7.4p1-10+deb9u4[lists.debian.org](https://lists.debian.org/debian-security-announce/2018/msg00209.html#:~:text=For%20the%20stable%20distribution%20,10%2Bdeb9u4) | OpenSSH 7.7 (fixed in 7.8/7.9)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2018-15473#:~:text=CVE,an%20invalid%20authenticating%20user) | **Yes** (7.4 < 7.8) | OpenSSH 7.4p1 (Stretch) patched for user-enumeration flaw[lists.debian.org](https://lists.debian.org/debian-security-announce/2018/msg00209.html#:~:text=Dariusz%20Tytko%2C%20Michal%20Sajdak%20and,existed%20on%20the%20target%20server). Upstream required 7.8, so 7.4p1 normally seen as affected. | |
| Debian 10 Buster | sudo | CVE-2021-3156 | 1.8.27-1+deb10u3[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Package%20Type%20Release%20Fixed%20Version,1.1) | sudo <1.9.5p2 (fixed in 1.9.5p2)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Name%20CVE,ELTS%2C%20Red%20Hat%2C%20Ubuntu%2C%20Gentoo)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=sudo%20%28PTS%29bullseye%201.9.5p2,1%20fixed) | **Yes** (1.8.27 < 1.9.5p2) | sudo 1.8.27 in Buster with Baron Samedit patch[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Package%20Type%20Release%20Fixed%20Version,1.1). Upstream says versions below 1.9.5p2 are vulnerable, so 1.8.27 would be flagged[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Name%20CVE,ELTS%2C%20Red%20Hat%2C%20Ubuntu%2C%20Gentoo). | |
| RHEL 7 | sudo | CVE-2019-14287 | 1.8.23-4.el7\_7.1[suse.com](https://www.suse.com/security/cve/CVE-2019-14287.html#:~:text=CVE,4.el7_7.1.%20Patchnames%3A%20RHSA) | sudo 1.8.27 (fixed in 1.8.28)[suse.com](https://www.suse.com/security/cve/CVE-2019-14287.html#:~:text=CVE,4.el7_7.1.%20Patchnames%3A%20RHSA)[nvd.nist.gov](https://nvd.nist.gov/vuln/detail/cve-2017-1000367#:~:text=Todd%20Miller%27s%20sudo%20version%201,function) | **Yes** (1.8.23 < 1.8.28) | sudo 1.8.23 in RHEL7 patched for Runas All bug[suse.com](https://www.suse.com/security/cve/CVE-2019-14287.html#:~:text=CVE,4.el7_7.1.%20Patchnames%3A%20RHSA). Upstream fix came later (1.8.28), so 1.8.23 is normally marked affected. | |
| Debian 8 Jessie | sudo | CVE-2017-1000367 | 1.8.10p3-1+deb8u4[lists.debian.org](https://lists.debian.org/debian-security-announce/2017/msg00127.html#:~:text=an%20SELinux,full%20root%20privileges) | sudo 1.8.20 (fixed in 1.8.21)[nvd.nist.gov](https://nvd.nist.gov/vuln/detail/cve-2017-1000367#:~:text=Todd%20Miller%27s%20sudo%20version%201,function)[security.snyk.io](https://security.snyk.io/vuln/SNYK-DEBIAN9-SUDO-406955#:~:text=Race%20Condition%20in%20sudo%20,2%20or%20higher.%20NVD%20Description) | **Yes** (1.8.10 < 1.8.21) | sudo 1.8.10p3 in Jessie got the tty hijack fix backported[lists.debian.org](https://lists.debian.org/debian-security-announce/2017/msg00127.html#:~:text=an%20SELinux,full%20root%20privileges). Upstream resolved it in a much newer sudo release, so 1.8.10p3 would appear vulnerable. | |
| Ubuntu 12.04 LTS Precise | bash | CVE-2014-6271 | 4.2-2ubuntu2.5[askubuntu.com](https://askubuntu.com/questions/528101/what-is-the-cve-2014-6271-bash-vulnerability-shellshock-and-how-do-i-fix-it#:~:text=dpkg%20,Version) | Bash 4.3 (fixed in 4.3 patch)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2014-6271#:~:text=Description%20GNU%20Bash%20through%204,present%20after%20the%20incorrect%20fix) | **Yes** (4.2 < 4.3-fixed) | Bash 4.2 on Precise patched for Shellshock[askubuntu.com](https://askubuntu.com/questions/528101/what-is-the-cve-2014-6271-bash-vulnerability-shellshock-and-how-do-i-fix-it#:~:text=dpkg%20,Version). Version 4.2 is below upstream 4.3 fix, so normally flagged as Shellshock-vulnerable. | |
| Debian 10 Buster (LTS) | curl | CVE-2023-27533 | 7.64.0-4+deb10u6[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6) | curl <8.0.0 (fixed in 8.0.0)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/source-package/curl#:~:text=A%20path%20traversal%20vulnerability%20exists,8.0%20during) | **Yes** (7.64.0 < 8.0.0) | curl 7.64.0 with TELNET injection fix backported (Debian LTS)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6). Upstream requires curl 8.x, so 7.64.0 is seen as affected. | |
| Debian 10 Buster (LTS) | curl | CVE-2023-27535 | 7.64.0-4+deb10u6[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE) | curl <8.0.0 (fixed in 8.0.0)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/source-package/curl#:~:text=A%20path%20traversal%20vulnerability%20exists,8.0%20during) | **Yes** (7.64.0 < 8.0.0) | curl 7.64.0 with FTP reuse auth bypass fix backported[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6). Version appears vulnerable by upstream standards (<8.0). | |
| Debian 10 Buster (LTS) | curl | CVE-2023-27536 | 7.64.0-4+deb10u6[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE) | curl <8.0.0 (fixed in 8.0.0)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/source-package/curl#:~:text=A%20path%20traversal%20vulnerability%20exists,8.0%20during) | **Yes** (7.64.0 < 8.0.0) | curl 7.64.0 with GSSAPI delegation reuse fix backported[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6). Upstream would mark 7.64.0 vulnerable. | |
| Debian 10 Buster (LTS) | curl | CVE-2023-27538 | 7.64.0-4+deb10u6[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE) | curl <8.0.0 (fixed in 8.0.0)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/source-package/curl#:~:text=A%20path%20traversal%20vulnerability%20exists,8.0%20during) | **Yes** (7.64.0 < 8.0.0) | curl 7.64.0 with SSH connection reuse fix backported[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[lists.debian.org](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6). Version number <8.0 means upstream would treat it as unfixed. | |
| Fedora 34 | glibc | CVE-2021-33574 | glibc-2.33-16.fc34[lists.fedoraproject.org](https://lists.fedoraproject.org/archives/list/package-announce%40lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/#:~:text=%5BSECURITY%5D%20Fedora%2034%20Update%3A%20glibc,11%202021%20Arjun%20Shankar) | glibc 2.33 (fixed in 2.34)[suse.com](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Heap,GHOST) | **Yes** (2.33 < 2.34) | glibc 2.33 with `mq_notify` use-after-free fix applied (Fedora update). Upstream fix came in 2.34, so 2.33 is normally flagged as vulnerable. | |
| RHEL 8 | glibc | CVE-2024-2961 | glibc-2.28-236.el8\_9.13[openwall.com](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,31)[openwall.com](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Vulnerable,263) | glibc 2.39 (fixed in 2.40)[openwall.com](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,459) | **Yes** (2.28 < 2.40) | glibc 2.28 with `iconv()` overflow fix backported (RHEL8 patch)[openwall.com](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,459). Upstream requires 2.40+, so 2.28 is considered affected. | |
| RHEL 7 | glibc | CVE-2015-0235 | glibc-2.17-55.el7\_0.5[suse.com](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=%2A%20%60glibc%20%3E%3D%202.17,55.el7_0.5) | glibc 2.2 up to 2.17 (fixed in 2.18)[suse.com](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Heap,GHOST) | **Yes** (2.17 < 2.18) | glibc 2.17 with GHOST bug patched (RHEL7 update)[suse.com](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Product,SUSE%20Liberty%20Linux%207). Upstream fix was in 2.18; 2.17 is normally flagged as vulnerable[suse.com](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Heap,GHOST). | |
| RHEL 7 | systemd | CVE-2020-1712 | systemd-219-57.el7\_8 (patch backport)[alas.aws.amazon.com](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=aarch64%3A%20systemd,57.amzn2.0.12.aarch64) | systemd 242 (fixed in 243)[alas.aws.amazon.com](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=A%20heap%20use,1712) | **Yes** (219 < 243) | systemd 219 with use-after-free fix backported (RHEL7/AL2 update)[alas.aws.amazon.com](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=A%20heap%20use,1712)[alas.aws.amazon.com](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=aarch64%3A%20systemd,57.amzn2.0.12.aarch64). Upstream fix is in v243, so v219 would be marked vulnerable. | |
| Alpine 3.10 | musl libc | CVE-2020-28928 | 1.1.22-r4[security.alpinelinux.org](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=musl%20%20%2038%201.2.2_pre2,tracker%200.9.1%20%E2%80%94%20Source%20code) | musl 1.2.1 (fixed in 1.2.2)[security.alpinelinux.org](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=CPE%20URI%20Source%20package%20Min,1.2.1) | **Yes** (1.1.x < 1.2.2) | musl 1.1.22 with `wcsnrtombs()` overflow fixed (Alpine 3.10)[security.alpinelinux.org](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=musl%20%20%2038%201.2.2_pre2,tracker%200.9.1%20%E2%80%94%20Source%20code). Upstream fixed in 1.2.2, so 1.1.22 would normally be considered vulnerable. | |
| Ubuntu 20.04 LTS Focal | openssl | CVE-2022-0778 | 1.1.1f-1ubuntu2.15 (patched)[serverfault.com](https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n#:~:text=,fix%20to%20their%20chosen%20version) | OpenSSL 1.1.1m (fixed in 1.1.1n)[serverfault.com](https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n#:~:text=,fix%20to%20their%20chosen%20version) | **Yes** (1.1.1f < 1.1.1n) | OpenSSL 1.1.1f with BN infinite-loop fix backported (Ubuntu)[serverfault.com](https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n#:~:text=,fix%20to%20their%20chosen%20version). Upstream says only 1.1.1n+ is safe, so 1.1.1f appears vulnerable to scanners. | |
**Sources:** Vendor security advisories and trackers (Debian DSAs, Ubuntu CVE/USN pages, Red Hat errata, SUSE and Alpine trackers) are linked above to confirm patch versions and upstream fix info[lists.debian.org](https://lists.debian.org/debian-security-announce/2014/msg00071.html#:~:text=For%20the%20stable%20distribution%20,2%2Bdeb7u5)[ubuntu.com](https://ubuntu.com/security/CVE-2024-39573#:~:text=22)[suse.com](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=%2A%20CVE,1227268)[openwall.com](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,459)[security-tracker.debian.org](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Package%20Type%20Release%20Fixed%20Version,1.1) etc. Each case demonstrates a backported security fix where the package version alone is misleading, helping test vulnerability scanners' ability to detect patched-but-backported packages instead of raising false positives.
Citations
[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DSA 2896-1\] openssl security update
https://lists.debian.org/debian-security-announce/2014/msg00071.html
](https://lists.debian.org/debian-security-announce/2014/msg00071.html#:~:text=For%20the%20stable%20distribution%20,2%2Bdeb7u5)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2014-0160
https://security-tracker.debian.org/tracker/CVE-2014-0160
](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Name%20CVE,Debian%20ELTS%2C%20%208%20Red)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2014-0160
https://security-tracker.debian.org/tracker/CVE-2014-0160
](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Package%20Type%20Release%20Fixed%20Version,1743883)[
CVE-2014-0160: OpenSSL Heartbleed Vulnerability Kaseya
https://helpdesk.kaseya.com/hc/en-gb/articles/4407522717329-CVE-2014-0160-OpenSSL-Heartbleed-Vulnerability
](https://helpdesk.kaseya.com/hc/en-gb/articles/4407522717329-CVE-2014-0160-OpenSSL-Heartbleed-Vulnerability#:~:text=If%20CentOS6%2C%20apply%20Unitrends%20security,42.el6)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2014-0160
https://security-tracker.debian.org/tracker/CVE-2014-0160
](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Description%20The%20,Debian%20ELTS%2C%20%208%20Red)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
CVE-2020-1971 Common Vulnerabilities and Exposures - SUSE
https://www.suse.com/security/cve/CVE-2020-1971.html
](https://www.suse.com/security/cve/CVE-2020-1971.html#:~:text=CVE,21.el7_9%3B%20openssl)[
![](https://www.google.com/s2/favicons?domain=https://linuxsecurity.com&sz=32)
Scientific Linux 7.x SLSA-2020-5566-1 Critical OpenSSL Update
https://linuxsecurity.com/advisories/scilinux/scilinux-slsa-2020-5566-1-important-openssl-on-sl7-x-x86-64-14-12-13
](https://linuxsecurity.com/advisories/scilinux/scilinux-slsa-2020-5566-1-important-openssl-on-sl7-x-x86-64-14-12-13#:~:text=Update%20linuxsecurity,21.el7_9.i686.rpm)[
![](https://www.google.com/s2/favicons?domain=https://openssl-library.org&sz=32)
Release and Advisory Timeline | OpenSSL Library
https://openssl-library.org/news/timeline/
](https://openssl-library.org/news/timeline/#:~:text=Release%20and%20Advisory%20Timeline%20,Truncated%20packet%20could)[
![](https://www.google.com/s2/favicons?domain=https://ubuntu.com&sz=32)
CVE-2024-39573 | Ubuntu
https://ubuntu.com/security/CVE-2024-39573
](https://ubuntu.com/security/CVE-2024-39573#:~:text=22)[
![](https://www.google.com/s2/favicons?domain=https://ubuntu.com&sz=32)
CVE-2024-39573 | Ubuntu
https://ubuntu.com/security/CVE-2024-39573
](https://ubuntu.com/security/CVE-2024-39573#:~:text=Description)[
![](https://www.google.com/s2/favicons?domain=https://ubuntu.com&sz=32)
CVE-2024-39573 | Ubuntu
https://ubuntu.com/security/CVE-2024-39573
](https://ubuntu.com/security/CVE-2024-39573#:~:text=Potential%20SSRF%20in%20mod_rewrite%20in,60%2C%20which%20fixes%20this%20issue)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
Security update for apache2 SUSE-SU-2024:2436-1 | SUSE Support | SUSE
https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/
](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Security%20fixes%3A)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
Security update for apache2 SUSE-SU-2024:2436-1 | SUSE Support | SUSE
https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/
](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=Package%20List%3A)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
Security update for apache2 SUSE-SU-2024:2436-1 | SUSE Support | SUSE
https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/
](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=%2A%20CVE,1227268)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
Security update for apache2 SUSE-SU-2024:2436-1 | SUSE Support | SUSE
https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/
](https://www.suse.com/support/update/announcement/2024/suse-su-20242436-1/#:~:text=%2A%20CVE,1227268)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DSA 4280-1\] openssh security update
https://lists.debian.org/debian-security-announce/2018/msg00209.html
](https://lists.debian.org/debian-security-announce/2018/msg00209.html#:~:text=For%20the%20stable%20distribution%20,10%2Bdeb9u4)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2018-15473 - Security Bug Tracker - Debian
https://security-tracker.debian.org/tracker/CVE-2018-15473
](https://security-tracker.debian.org/tracker/CVE-2018-15473#:~:text=CVE,an%20invalid%20authenticating%20user)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DSA 4280-1\] openssh security update
https://lists.debian.org/debian-security-announce/2018/msg00209.html
](https://lists.debian.org/debian-security-announce/2018/msg00209.html#:~:text=Dariusz%20Tytko%2C%20Michal%20Sajdak%20and,existed%20on%20the%20target%20server)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2021-3156
https://security-tracker.debian.org/tracker/CVE-2021-3156
](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Package%20Type%20Release%20Fixed%20Version,1.1)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2021-3156
https://security-tracker.debian.org/tracker/CVE-2021-3156
](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=Name%20CVE,ELTS%2C%20Red%20Hat%2C%20Ubuntu%2C%20Gentoo)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2021-3156
https://security-tracker.debian.org/tracker/CVE-2021-3156
](https://security-tracker.debian.org/tracker/CVE-2021-3156#:~:text=sudo%20%28PTS%29bullseye%201.9.5p2,1%20fixed)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
CVE-2019-14287 Common Vulnerabilities and Exposures - SUSE
https://www.suse.com/security/cve/CVE-2019-14287.html
](https://www.suse.com/security/cve/CVE-2019-14287.html#:~:text=CVE,4.el7_7.1.%20Patchnames%3A%20RHSA)[
![](https://www.google.com/s2/favicons?domain=https://nvd.nist.gov&sz=32)
CVE-2017-1000367 Detail - NVD
https://nvd.nist.gov/vuln/detail/cve-2017-1000367
](https://nvd.nist.gov/vuln/detail/cve-2017-1000367#:~:text=Todd%20Miller%27s%20sudo%20version%201,function)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DSA 3867-1\] sudo security update
https://lists.debian.org/debian-security-announce/2017/msg00127.html
](https://lists.debian.org/debian-security-announce/2017/msg00127.html#:~:text=an%20SELinux,full%20root%20privileges)[
![](https://www.google.com/s2/favicons?domain=https://security.snyk.io&sz=32)
Race Condition in sudo | CVE-2017-1000367 | Snyk
https://security.snyk.io/vuln/SNYK-DEBIAN9-SUDO-406955
](https://security.snyk.io/vuln/SNYK-DEBIAN9-SUDO-406955#:~:text=Race%20Condition%20in%20sudo%20,2%20or%20higher.%20NVD%20Description)[
![](https://www.google.com/s2/favicons?domain=https://askubuntu.com&sz=32)
security - What is the CVE-2014-6271 bash vulnerability (Shellshock) and how do I fix it? - Ask Ubuntu
https://askubuntu.com/questions/528101/what-is-the-cve-2014-6271-bash-vulnerability-shellshock-and-how-do-i-fix-it
](https://askubuntu.com/questions/528101/what-is-the-cve-2014-6271-bash-vulnerability-shellshock-and-how-do-i-fix-it#:~:text=dpkg%20,Version)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
CVE-2014-6271
https://security-tracker.debian.org/tracker/CVE-2014-6271
](https://security-tracker.debian.org/tracker/CVE-2014-6271#:~:text=Description%20GNU%20Bash%20through%204,present%20after%20the%20incorrect%20fix)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DLA 3398-1\] curl security update
https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html
](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=Package%20%20%20%20,27538)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DLA 3398-1\] curl security update
https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html
](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=For%20Debian%2010%20buster%2C%20these,4%2Bdeb10u6)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
Information on source package curl - Security Bug Tracker - Debian
https://security-tracker.debian.org/tracker/source-package/curl
](https://security-tracker.debian.org/tracker/source-package/curl#:~:text=A%20path%20traversal%20vulnerability%20exists,8.0%20during)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DLA 3398-1\] curl security update
https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html
](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DLA 3398-1\] curl security update
https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html
](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
\[SECURITY\] \[DLA 3398-1\] curl security update
https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html
](https://lists.debian.org/debian-lts-announce/2023/04/msg00025.html#:~:text=CVE)[
![](https://www.google.com/s2/favicons?domain=https://lists.fedoraproject.org&sz=32)
\[SECURITY\] Fedora 34 Update: glibc-2.33-16.fc34
https://lists.fedoraproject.org/archives/list/package-announce%40lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/
](https://lists.fedoraproject.org/archives/list/package-announce%40lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/#:~:text=%5BSECURITY%5D%20Fedora%2034%20Update%3A%20glibc,11%202021%20Arjun%20Shankar)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
CVE-2015-0235 Common Vulnerabilities and Exposures | SUSE
https://www.suse.com/security/cve/CVE-2015-0235.html
](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Heap,GHOST)[
![](https://www.google.com/s2/favicons?domain=https://www.openwall.com&sz=32)
oss-security - The GNU C Library security advisories update for 2024-04-17: GLIBC-SA-2024-0004/CVE-2024-2961: ISO-2022-CN-EXT: fix out-of-bound writes when writing escape sequence
https://www.openwall.com/lists/oss-security/2024/04/17/9
](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,31)[
![](https://www.google.com/s2/favicons?domain=https://www.openwall.com&sz=32)
oss-security - The GNU C Library security advisories update for 2024-04-17: GLIBC-SA-2024-0004/CVE-2024-2961: ISO-2022-CN-EXT: fix out-of-bound writes when writing escape sequence
https://www.openwall.com/lists/oss-security/2024/04/17/9
](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Vulnerable,263)[
![](https://www.google.com/s2/favicons?domain=https://www.openwall.com&sz=32)
oss-security - The GNU C Library security advisories update for 2024-04-17: GLIBC-SA-2024-0004/CVE-2024-2961: ISO-2022-CN-EXT: fix out-of-bound writes when writing escape sequence
https://www.openwall.com/lists/oss-security/2024/04/17/9
](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,459)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
CVE-2015-0235 Common Vulnerabilities and Exposures | SUSE
https://www.suse.com/security/cve/CVE-2015-0235.html
](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=%2A%20%60glibc%20%3E%3D%202.17,55.el7_0.5)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
CVE-2015-0235 Common Vulnerabilities and Exposures | SUSE
https://www.suse.com/security/cve/CVE-2015-0235.html
](https://www.suse.com/security/cve/CVE-2015-0235.html#:~:text=Product,SUSE%20Liberty%20Linux%207)[
ALAS2-2020-1388
https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html
](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=aarch64%3A%20systemd,57.amzn2.0.12.aarch64)[
ALAS2-2020-1388
https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html
](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=A%20heap%20use,1712)[
CVE-2020-28928 — Alpine Security Tracker
https://security.alpinelinux.org/vuln/CVE-2020-28928
](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=musl%20%20%2038%201.2.2_pre2,tracker%200.9.1%20%E2%80%94%20Source%20code)[
CVE-2020-28928 — Alpine Security Tracker
https://security.alpinelinux.org/vuln/CVE-2020-28928
](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=CPE%20URI%20Source%20package%20Min,1.2.1)[
![](https://www.google.com/s2/favicons?domain=https://serverfault.com&sz=32)
How can I know that Ubuntu 18.04 Bionic's latest OpenSSL is really ...
https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n
](https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n#:~:text=,fix%20to%20their%20chosen%20version)
All Sources
[
![](https://www.google.com/s2/favicons?domain=https://lists.debian.org&sz=32)
lists.debian
](https://lists.debian.org/debian-security-announce/2014/msg00071.html#:~:text=For%20the%20stable%20distribution%20,2%2Bdeb7u5)[
![](https://www.google.com/s2/favicons?domain=https://security-tracker.debian.org&sz=32)
security...er.debian
](https://security-tracker.debian.org/tracker/CVE-2014-0160#:~:text=Name%20CVE,Debian%20ELTS%2C%20%208%20Red)[
helpdesk.kaseya
](https://helpdesk.kaseya.com/hc/en-gb/articles/4407522717329-CVE-2014-0160-OpenSSL-Heartbleed-Vulnerability#:~:text=If%20CentOS6%2C%20apply%20Unitrends%20security,42.el6)[
![](https://www.google.com/s2/favicons?domain=https://www.suse.com&sz=32)
suse
](https://www.suse.com/security/cve/CVE-2020-1971.html#:~:text=CVE,21.el7_9%3B%20openssl)[
![](https://www.google.com/s2/favicons?domain=https://linuxsecurity.com&sz=32)
linuxsecurity
](https://linuxsecurity.com/advisories/scilinux/scilinux-slsa-2020-5566-1-important-openssl-on-sl7-x-x86-64-14-12-13#:~:text=Update%20linuxsecurity,21.el7_9.i686.rpm)[
![](https://www.google.com/s2/favicons?domain=https://openssl-library.org&sz=32)
openssl-library
](https://openssl-library.org/news/timeline/#:~:text=Release%20and%20Advisory%20Timeline%20,Truncated%20packet%20could)[
![](https://www.google.com/s2/favicons?domain=https://ubuntu.com&sz=32)
ubuntu
](https://ubuntu.com/security/CVE-2024-39573#:~:text=22)[
![](https://www.google.com/s2/favicons?domain=https://nvd.nist.gov&sz=32)
nvd.nist
](https://nvd.nist.gov/vuln/detail/cve-2017-1000367#:~:text=Todd%20Miller%27s%20sudo%20version%201,function)[
![](https://www.google.com/s2/favicons?domain=https://security.snyk.io&sz=32)
security.snyk
](https://security.snyk.io/vuln/SNYK-DEBIAN9-SUDO-406955#:~:text=Race%20Condition%20in%20sudo%20,2%20or%20higher.%20NVD%20Description)[
![](https://www.google.com/s2/favicons?domain=https://askubuntu.com&sz=32)
askubuntu
](https://askubuntu.com/questions/528101/what-is-the-cve-2014-6271-bash-vulnerability-shellshock-and-how-do-i-fix-it#:~:text=dpkg%20,Version)[
![](https://www.google.com/s2/favicons?domain=https://lists.fedoraproject.org&sz=32)
lists.fedoraproject
](https://lists.fedoraproject.org/archives/list/package-announce%40lists.fedoraproject.org/message/RBUUWUGXVILQXVWEOU7N42ICHPJNAEUP/#:~:text=%5BSECURITY%5D%20Fedora%2034%20Update%3A%20glibc,11%202021%20Arjun%20Shankar)[
![](https://www.google.com/s2/favicons?domain=https://www.openwall.com&sz=32)
openwall
](https://www.openwall.com/lists/oss-security/2024/04/17/9#:~:text=Public,31)[
alas.aws.amazon
](https://alas.aws.amazon.com/AL2/ALAS2-2020-1388.html#:~:text=aarch64%3A%20systemd,57.amzn2.0.12.aarch64)[
security.alpinelinux
](https://security.alpinelinux.org/vuln/CVE-2020-28928#:~:text=musl%20%20%2038%201.2.2_pre2,tracker%200.9.1%20%E2%80%94%20Source%20code)[
![](https://www.google.com/s2/favicons?domain=https://serverfault.com&sz=32)
serverfault
](https://serverfault.com/questions/1096683/how-can-i-know-that-ubuntu-18-04-bionics-latest-openssl-is-really-1-1-1n#:~:text=,fix%20to%20their%20chosen%20version)

View File

@@ -0,0 +1,160 @@
## Product Advisory: Deterministic VEX-first vulnerability verdicts with CycloneDX 1.7
### 1) The problem you are solving
Modern scanners produce a long list of “components with known CVEs,” but that list is routinely misleading because it ignores *context*: whether the vulnerable code is shipped, configured, reachable, mitigated, or already fixed via backport. Teams then waste time on false positives, duplicate findings, and non-actionable noise.
A **VEX-first** approach solves this by attaching *exploitability/impact assertions* to SBOM components. In CycloneDX, this is expressed via the **Vulnerability / Analysis** model (often used as VEX), which can declare that a component is **not affected**, **under investigation/in triage**, **exploitable/affected**, or **resolved/fixed**, along with rationale/justification and other details. CycloneDX explicitly frames this as “vulnerability exploitability” context, including a `state` and a `justification` for why a vulnerability is (or isn't) a practical risk. ([cyclonedx.org][1])
The core product challenge is therefore:
* You will ingest **multiple statements** (vendors, distros, internal security, runtime evidence) that may **conflict**.
* Those statements may be **conditional** (only affected on certain OS, feature flags, build options).
* You must produce a **single stable, explainable verdict** per (product, vuln), and do so **deterministically** so audits and diffs are reproducible.
---
### 2) Product intent and outcomes
**Primary outcome:** Reduce noise while increasing trust: every suppression or escalation is backed by evidence and explainable logic.
**What “good” looks like:**
* Fewer alerts, but higher signal.
* Each vuln has a clear **final verdict** plus **reason chain** (“why this was marked not_affected/fixed/affected”).
* Deterministic replay: the same inputs produce the same outputs.
---
### 3) Recommended data contract (CycloneDX 1.7 aligned)
Use CycloneDX 1.7 as the canonical interchange for impact/exploitability assertions:
* **SBOM**: components + dependencies (CycloneDX and/or SPDX)
* **Vulnerability entries** with **analysis** fields:
* `analysis.state` (status in context) and `analysis.justification` (why), as described in CycloneDX's exploitability use case. ([cyclonedx.org][1])
* Optional ingress from **OpenVEX** or CSAF; normalize into CycloneDX analysis semantics (OpenVEX defines the commonly used status set `not_affected / affected / fixed / under_investigation`, and requires justification in `not_affected` cases). ([GitHub][2])
Graph relationships (if you use SPDX 3.0.1 as your internal graph layer):
* Model dependencies and containment via SPDX `Relationship` and `RelationshipType`, which formalize “Element A RELATIONSHIP Element B” semantics used to compute transitive impact. ([SPDX][3])
---
### 4) Product behavior guidelines
#### A. Single “Risk Verdict” per vuln, backed by evidence
Expose one final verdict per vulnerability at the product level, with an expandable “proof” pane:
* Inputs considered (SBOM nodes, relationship paths, VEX statements, conditions).
* Merge logic explanation (how conflicts were resolved).
* Timestamped lineage: which feed/source asserted what.
#### B. Quiet-by-design UX
* Default views show only items needing action: **Affected/Exploitable**, and **Under Investigation** with age/timeouts.
* “Not affected” and “Fixed/Resolved” are accessible but not front-and-center; they primarily serve audit and trust.
#### C. Diff-aware notifications
Notify only on **meaningful transitions** (e.g., Unknown→Affected, Affected→Fixed), not on every feed refresh.
---
### 5) Development guidelines (deterministic resolver)
#### A. Normalize identifiers first
Create a strict canonical key for matching “the same component” across SBOMs and VEX:
1. prefer **purl**, then **CPE**, then (name, version, supplier).
2. persist alias mappings (vendor naming variance is normal).
#### B. Represent the world as two layers
1. **Graph layer** (what is shipped/depends-on/contains what)
2. **Assertion layer** (CycloneDX 1.7 vulnerability analysis statements, plus optional runtime/reachability evidence)
Do not mix them—keep assertions as immutable facts that the resolver evaluates.
#### C. Condition evaluation must be total and deterministic
For each assertion, evaluate conditions against a frozen `Context`:
* platform (OS/distro/arch), build flags, enabled features, packaging mode
* runtime signals (if used) must be versioned and hashed like any other input
If a condition cannot be evaluated, treat it explicitly as **Unknown**, not false.
#### D. Merge conflicts via a documented lattice
Define a monotonic merge function that is:
* **commutative** (order independent),
* **idempotent** (reapplying doesn't change),
* **associative** (supports streaming/parallel merges).
A pragmatic priority (adjust to your policy):
1. **Fixed/Resolved** (with evidence of fix scope)
2. **Not affected** (with valid justification and conditions satisfied)
3. **Affected/Exploitable**
4. **Under investigation / In triage**
5. **Unknown**
CycloneDX's exploitability model explicitly supports “state + justification” to make “not affected” meaningful, not a hand-wave. ([cyclonedx.org][1])
#### E. Propagation rules must be explicit
Decide and document how assertions propagate across the dependency graph:
* When a dependency is **Affected**, does the product become Affected automatically? (Typically yes if the dependency is shipped and used, unless a product-level assertion says otherwise.)
* When a dependency is **Not affected** due to “code removed before shipping,” does the product inherit Not affected? (Often yes, but only if you can prove the affected code path is absent for the shipped artifact.)
* Keep propagation rules versioned to avoid “policy drift” breaking deterministic replay.
#### F. Always emit a proof object
For every final verdict emit:
* contributing assertions (source IDs), condition evaluations, merge steps
* the graph path(s) that made it relevant (SPDX Relationship chain or CycloneDX dependency references)
This proof is what lets you be quiet-by-design without losing auditability.
---
### 6) Interop guidance (OpenVEX / CSAF → CycloneDX 1.7)
If you ingest OpenVEX:
* Map OpenVEX status to CycloneDX analysis state (policy-defined mapping).
* Enforce OpenVEX minimums: `not_affected` should have a justification/impact statement. ([GitHub][2])
If you ingest CSAF advisories:
* Treat them as another assertion source; do not let them overwrite higher-confidence internal evidence without explicit precedence rules.
---
### 7) Testing and rollout checklist
* **Golden test vectors**: fixed input bundles (SBOM + assertions + context) with expected verdicts.
* **Determinism tests**: shuffle assertion ordering; results must be identical.
* **Regression diffs**: store prior proofs; verify only intended transitions occur after feed updates.
* **Adversarial cases**: conflicting assertions, partial conditions, alias mismatches, missing dependency edges.
---
### 8) Common failure modes to avoid
* Treating “not affected” as a suppression without requiring justification.
* Allowing “latest feed wins” behavior (non-deterministic and unauditable).
* Mixing runtime telemetry directly into SBOM identity (breaks replay).
* Implicit propagation rules (different engineers will interpret differently; results drift).
If you want, I can also provide a short, implementation-ready “resolver contract” (types, verdict lattice, proof schema) that is CycloneDX 1.7-centric while remaining neutral to whether you store the graph as CycloneDX dependencies or SPDX 3.0.1 relationships.
[1]: https://cyclonedx.org/use-cases/vulnerability-exploitability/?utm_source=chatgpt.com "Security Use Case: Vulnerability Exploitability"
[2]: https://github.com/openvex/spec/blob/main/OPENVEX-SPEC.md?utm_source=chatgpt.com "spec/OPENVEX-SPEC.md at main"
[3]: https://spdx.github.io/spdx-spec/v3.0.1/model/Core/Classes/Relationship/?utm_source=chatgpt.com "Relationship - SPDX Specification 3.0.1"

View File

@@ -0,0 +1,115 @@
Here's a simple, high-signal pattern you can drop into your security product: **gate AI remediation/explanations behind an “Evidence Coverage” badge**—and hide suggestions when coverage is weak.
---
### What this solves (plain English)
AI advice is only trustworthy when it's grounded in real evidence. If your scan only sees half the picture, AI “fixes” become noise. A visible coverage badge makes this explicit and keeps the UI quiet until you've got enough facts.
---
### What “Evidence Coverage” means
Score = % of the verdict's required facts present, e.g., do we have:
* **Reachability** (is the vulnerable code/path actually callable in this artifact/runtime?)
* **VEX** (vendor/product statements: affected / not-affected / under-investigation)
* **Runtime** (telemetry, process trees, loaded libs, eBPF hooks)
* **Exploit signals** (known exploits, KEV, EPSS tier, in-the-wild intel)
* **Patch/backport proof** (distro backports, symbols, diff/BuildID match)
* **Provenance** (in-toto/DSSE attestations, signer trust)
* **Environment match** (kernel/os/distro/package set parity)
* **Differential context** (did this change since last release?)
Each fact bucket contributes weighted points → a 0–100% **Coverage** score.
---
### UX rule of thumb
* **<60%**: Hide AI suggestions by default. Show a muted badge — “Coverage 41% — add sources to unlock guidance.”
* **60–79%**: Collapse AI panel; allow manual expand with a caution label. Every sentence shows its **citations**.
* **≥80%**: Show AI remediation by default with a green badge and inline evidence chips.
* **100%**: Add a subtle “High confidence” ribbon + “export proof” link.
---
### Minimal UI components
* A small badge next to each finding: `Coverage 82%` (click drawer).
* Drawer tabs: **Sources**, **Why we think its reachable**, **Counterevidence**, **Gaps**.
* “Fill the gaps” callouts (e.g., “Attach VEX”, “Enable runtime sensor”, “Upload SBOM”).
---
### Copy you can reuse
* Collapsed state (low coverage):
*“We're missing runtime or VEX evidence. Add one source to unlock tailored remediation.”*
* Expanded (medium):
*“Guidance shown with caution. 3/5 evidence buckets present. See gaps →”*
---
### Data model (lean)
```yaml
coverage:
score: 0-100
buckets:
- id: reachability # call graph, symbol, entrypoints
present: true
weight: 0.22
evidence_refs: [e1,e7]
- id: vex # product/vendor statements
present: false
weight: 0.18
evidence_refs: []
- id: runtime
present: true
weight: 0.20
evidence_refs: [e3]
- id: exploit_signals
present: true
weight: 0.15
evidence_refs: [e6]
- id: patch_backport
present: false
weight: 0.15
evidence_refs: []
- id: provenance
present: true
weight: 0.10
evidence_refs: [e9]
```
---
### Policy in one line (ship this as a guard)
```pseudo
if coverage.score < 60: hide_ai()
elif coverage.score < 80: show_ai(collapsed=true, label="limited evidence")
else: show_ai(collapsed=false, label="evidence-backed")
```
---
### What the AI must output (when shown)
* **Step-by-step remediation** with **per-step citations** to the evidence drawer.
* **Why this is safe** (mentions backports, ABI risk, service impact).
* **Counterfactual**: “If VEX says Not Affected — do X instead.”
* **Residual risk** and **rollback** plan.
---
### How to reach ≥80% more often
* Auto-request missing inputs (“Upload maintainer VEX” / “Turn on runtime for 24h”).
* Fetch distro backport diffs and symbol maps to close the patch/backport bucket.
* Merge SBOM + callgraph + eBPF to strengthen reachability.
---
If you want, I can draft a dropin React component (Badge + Drawer) and a tiny scoring service (C#/.NET 10) that plugs into your verdict pipeline.

File diff suppressed because it is too large Load Diff

View File

@@ -52,6 +52,7 @@ internal static class CommandFactory
root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildTenantsCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildPolicyCommand(services, options, verboseOption, cancellationToken));
root.Add(ToolsCommandGroup.BuildToolsCommand(loggerFactory, cancellationToken));
root.Add(BuildTaskRunnerCommand(services, verboseOption, cancellationToken));
root.Add(BuildFindingsCommand(services, verboseOption, cancellationToken));
root.Add(BuildAdviseCommand(services, options, verboseOption, cancellationToken));

View File

@@ -10,13 +10,12 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Replay;
using StellaOps.Canonicalization.Json;
using StellaOps.Canonicalization.Verification;
using StellaOps.Policy.Replay;
using StellaOps.Replay.Core;
using StellaOps.Replay.Core.Export;
using StellaOps.Testing.Manifests.Models;
using StellaOps.Testing.Manifests.Serialization;
namespace StellaOps.Cli.Commands;

View File

@@ -0,0 +1,25 @@
using System;
using System.CommandLine;
using System.Threading;
using Microsoft.Extensions.Logging;
using StellaOps.Policy;
using StellaOps.Policy.Tools;
namespace StellaOps.Cli.Commands;
/// <summary>
/// Builds the <c>tools</c> CLI command group that hosts local policy tooling
/// and maintenance subcommands.
/// </summary>
internal static class ToolsCommandGroup
{
    /// <summary>
    /// Creates the <c>tools</c> command and attaches the policy tooling subcommands.
    /// </summary>
    /// <param name="loggerFactory">Factory handed to runners that need logging.</param>
    /// <param name="cancellationToken">Token propagated to each subcommand builder.</param>
    /// <returns>The fully wired <c>tools</c> <see cref="Command"/>.</returns>
    internal static Command BuildToolsCommand(ILoggerFactory loggerFactory, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(loggerFactory);

        var group = new Command("tools", "Local policy tooling and maintenance commands.");

        // The DSL validator shares a single validation runner; the other tools build their own runners.
        var dslValidation = new PolicyValidationRunner(new PolicyValidationCli());
        group.Add(PolicyDslValidatorCommand.BuildCommand(dslValidation, cancellationToken));
        group.Add(PolicySchemaExporterCommand.BuildCommand(new PolicySchemaExporterRunner(), cancellationToken));
        group.Add(PolicySimulationSmokeCommand.BuildCommand(new PolicySimulationSmokeRunner(loggerFactory), cancellationToken));

        return group;
    }
}

View File

@@ -0,0 +1,60 @@
using System.Collections.Immutable;
namespace StellaOps.Cli.Replay;
/// <summary>
/// Canonical record of a single run: artifact and SBOM digests, feed/policy
/// snapshots, tool versions, crypto profile, and environment profile, plus an
/// optional digest computed over the manifest itself.
/// </summary>
public sealed record RunManifest
{
    /// <summary>Identifier of the run this manifest describes.</summary>
    public required string RunId { get; init; }

    /// <summary>Manifest schema version; defaults to "1.0.0".</summary>
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>Digests of the artifacts covered by the run.</summary>
    public required ImmutableArray<ArtifactDigest> ArtifactDigests { get; init; }

    /// <summary>SBOM references for the run; empty when none were recorded.</summary>
    public ImmutableArray<SbomReference> SbomDigests { get; init; } = [];

    /// <summary>Snapshot of the feed consumed during the run.</summary>
    public required FeedSnapshot FeedSnapshot { get; init; }

    /// <summary>Snapshot of the policy configuration in force during the run.</summary>
    public required PolicySnapshot PolicySnapshot { get; init; }

    /// <summary>Versions of the tools that produced the run's outputs.</summary>
    public required ToolVersions ToolVersions { get; init; }

    /// <summary>Cryptographic profile (trust roots, allowed algorithms) for the run.</summary>
    public required CryptoProfile CryptoProfile { get; init; }

    /// <summary>Environment profile (services and versions) the run executed against.</summary>
    public required EnvironmentProfile EnvironmentProfile { get; init; }

    /// <summary>Fixed PRNG seed when one was used; null otherwise.</summary>
    public long? PrngSeed { get; init; }

    /// <summary>Version of the canonicalization scheme applied when serializing the manifest.</summary>
    public required string CanonicalizationVersion { get; init; }

    /// <summary>Timestamp at which the run was initiated.</summary>
    public required DateTimeOffset InitiatedAt { get; init; }

    /// <summary>Digest over the manifest's canonical form; null until computed.</summary>
    public string? ManifestDigest { get; init; }
}

/// <summary>Digest of a single artifact.</summary>
/// <param name="Algorithm">Hash algorithm name.</param>
/// <param name="Digest">Digest value.</param>
/// <param name="MediaType">Optional media type of the artifact.</param>
/// <param name="Reference">Optional reference (e.g. name or locator) for the artifact.</param>
public sealed record ArtifactDigest(
    string Algorithm,
    string Digest,
    string? MediaType,
    string? Reference);

/// <summary>Reference to an SBOM document.</summary>
/// <param name="Format">SBOM format label.</param>
/// <param name="Digest">Digest of the SBOM content.</param>
/// <param name="Uri">Optional location of the SBOM.</param>
public sealed record SbomReference(
    string Format,
    string Digest,
    string? Uri);

/// <summary>Point-in-time snapshot of a data feed.</summary>
/// <param name="FeedId">Identifier of the feed.</param>
/// <param name="Version">Feed version at snapshot time.</param>
/// <param name="Digest">Digest of the snapshot content.</param>
/// <param name="SnapshotAt">Timestamp of the snapshot.</param>
public sealed record FeedSnapshot(
    string FeedId,
    string Version,
    string Digest,
    DateTimeOffset SnapshotAt);

/// <summary>Snapshot of the policy configuration applied to a run.</summary>
/// <param name="PolicyVersion">Version of the policy.</param>
/// <param name="LatticeRulesDigest">Digest of the lattice rules in effect.</param>
/// <param name="EnabledRules">Identifiers of the rules enabled for the run.</param>
public sealed record PolicySnapshot(
    string PolicyVersion,
    string LatticeRulesDigest,
    ImmutableArray<string> EnabledRules);

/// <summary>Versions of the tools involved in the run.</summary>
/// <param name="ScannerVersion">Scanner version.</param>
/// <param name="SbomGeneratorVersion">SBOM generator version.</param>
/// <param name="ReachabilityEngineVersion">Reachability engine version.</param>
/// <param name="AttestorVersion">Attestor version.</param>
/// <param name="AdditionalTools">Name-to-version map for any further tools.</param>
public sealed record ToolVersions(
    string ScannerVersion,
    string SbomGeneratorVersion,
    string ReachabilityEngineVersion,
    string AttestorVersion,
    ImmutableDictionary<string, string> AdditionalTools);

/// <summary>Cryptographic profile applied during the run.</summary>
/// <param name="ProfileName">Name of the profile.</param>
/// <param name="TrustRootIds">Identifiers of the trust roots in scope.</param>
/// <param name="AllowedAlgorithms">Algorithms permitted by the profile.</param>
public sealed record CryptoProfile(
    string ProfileName,
    ImmutableArray<string> TrustRootIds,
    ImmutableArray<string> AllowedAlgorithms);

/// <summary>Profile of the environment the run executed against.</summary>
/// <param name="Name">Environment profile name.</param>
/// <param name="ValkeyEnabled">Whether Valkey was enabled.</param>
/// <param name="PostgresVersion">PostgreSQL version, when applicable.</param>
/// <param name="ValkeyVersion">Valkey version, when applicable.</param>
public sealed record EnvironmentProfile(
    string Name,
    bool ValkeyEnabled,
    string? PostgresVersion,
    string? ValkeyVersion);

View File

@@ -0,0 +1,43 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
namespace StellaOps.Cli.Replay;
/// <summary>
/// Canonical JSON serialization helpers for <see cref="RunManifest"/>:
/// deterministic serialization, deserialization, and SHA-256 self-digesting.
/// </summary>
internal static class RunManifestSerializer
{
    // Web defaults + camelCase + null-skipping + relaxed escaping; the output is
    // then passed through CanonJson to obtain a canonical byte ordering.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
    };

    /// <summary>Serializes the manifest to canonical (deterministically ordered) JSON text.</summary>
    public static string Serialize(RunManifest manifest)
    {
        var encoded = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
        return Encoding.UTF8.GetString(CanonJson.CanonicalizeParsedJson(encoded));
    }

    /// <summary>Deserializes a manifest from JSON; throws when the payload yields null.</summary>
    public static RunManifest Deserialize(string json)
        => JsonSerializer.Deserialize<RunManifest>(json, JsonOptions)
            ?? throw new InvalidOperationException("Failed to deserialize manifest");

    /// <summary>
    /// Computes the lowercase hex SHA-256 digest of the canonical JSON, with
    /// <see cref="RunManifest.ManifestDigest"/> cleared first so the digest does
    /// not cover itself.
    /// </summary>
    public static string ComputeDigest(RunManifest manifest)
    {
        var canonical = Serialize(manifest with { ManifestDigest = null });
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>Returns a copy of the manifest carrying its own computed digest.</summary>
    public static RunManifest WithDigest(RunManifest manifest)
        => manifest with { ManifestDigest = ComputeDigest(manifest) };
}

View File

@@ -49,8 +49,8 @@
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Canonicalization/StellaOps.Canonicalization.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.DeltaVerdict/StellaOps.DeltaVerdict.csproj" />
<ProjectReference Include="../../__Tests/__Libraries/StellaOps.Testing.Manifests/StellaOps.Testing.Manifests.csproj" />
<ProjectReference Include="../../AirGap/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy/StellaOps.AirGap.Policy.csproj" />
<ProjectReference Include="../../AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj" />
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
@@ -69,6 +69,7 @@
<ProjectReference Include="../../Policy/StellaOps.PolicyDsl/StellaOps.PolicyDsl.csproj" />
<ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
<ProjectReference Include="../../Policy/StellaOps.Policy.RiskProfile/StellaOps.Policy.RiskProfile.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Policy.Tools/StellaOps.Policy.Tools.csproj" />
<ProjectReference Include="../../Attestor/StellaOps.Attestation/StellaOps.Attestation.csproj" />
<ProjectReference Include="../../Attestor/StellaOps.Attestor.Envelope/StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Infrastructure.Postgres/StellaOps.Infrastructure.Postgres.csproj" />

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -54,8 +55,20 @@ public sealed class ScannerDownloadVerifyTests
internal static class CommandHandlersTestShim
{
public static Task VerifyBundlePublicAsync(string path, ILogger logger, CancellationToken token)
=> typeof(CommandHandlers)
.GetMethod("VerifyBundleAsync", System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Static)!
.Invoke(null, new object[] { path, logger, token }) as Task
?? Task.CompletedTask;
{
var method = typeof(CommandHandlers).GetMethod(
"VerifyBundleAsync",
BindingFlags.NonPublic | BindingFlags.Static,
binder: null,
types: new[] { typeof(string), typeof(ILogger), typeof(CancellationToken) },
modifiers: null);
if (method is null)
{
throw new MissingMethodException(nameof(CommandHandlers), "VerifyBundleAsync");
}
return method.Invoke(null, new object[] { path, logger, token }) as Task
?? Task.CompletedTask;
}
}

View File

@@ -0,0 +1,69 @@
using System;
using System.CommandLine;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Commands;
using StellaOps.Cli.Configuration;
namespace StellaOps.Cli.Tests.Commands;
/// <summary>
/// Verifies that the CLI exposes the <c>tools</c> command group and that each
/// tooling subcommand carries its expected options.
/// </summary>
public sealed class ToolsCommandGroupTests
{
    [Fact]
    public void Create_ExposesToolsCommands()
    {
        using var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.None));
        // ServiceProvider is IDisposable; dispose it so the test does not leak.
        using var services = new ServiceCollection().BuildServiceProvider();

        var root = CommandFactory.Create(services, new StellaOpsCliOptions(), CancellationToken.None, loggerFactory);

        var tools = Assert.Single(root.Subcommands, command => string.Equals(command.Name, "tools", StringComparison.Ordinal));
        Assert.Contains(tools.Subcommands, command => string.Equals(command.Name, "policy-dsl-validate", StringComparison.Ordinal));
        Assert.Contains(tools.Subcommands, command => string.Equals(command.Name, "policy-schema-export", StringComparison.Ordinal));
        Assert.Contains(tools.Subcommands, command => string.Equals(command.Name, "policy-simulation-smoke", StringComparison.Ordinal));
    }

    [Fact]
    public void ToolsCommand_PolicyDslValidator_HasExpectedOptions()
    {
        var command = BuildToolsCommand().Subcommands.First(c => c.Name == "policy-dsl-validate");
        Assert.NotNull(FindOption(command, "--strict", "-s"));
        Assert.NotNull(FindOption(command, "--json", "-j"));
        Assert.Contains(command.Arguments, argument => string.Equals(argument.Name, "inputs", StringComparison.Ordinal));
    }

    [Fact]
    public void ToolsCommand_PolicySchemaExporter_HasExpectedOptions()
    {
        var command = BuildToolsCommand().Subcommands.First(c => c.Name == "policy-schema-export");
        Assert.NotNull(FindOption(command, "--output", "-o"));
        Assert.NotNull(FindOption(command, "--repo-root", "-r"));
    }

    [Fact]
    public void ToolsCommand_PolicySimulationSmoke_HasExpectedOptions()
    {
        var command = BuildToolsCommand().Subcommands.First(c => c.Name == "policy-simulation-smoke");
        Assert.NotNull(FindOption(command, "--scenario-root", "-r"));
        Assert.NotNull(FindOption(command, "--output", "-o"));
        Assert.NotNull(FindOption(command, "--repo-root"));
        Assert.NotNull(FindOption(command, "--fixed-time"));
    }

    /// <summary>Builds a fresh <c>tools</c> command for option inspection.</summary>
    private static Command BuildToolsCommand()
    {
        // Intentionally NOT `using`: the returned command's runners capture the
        // factory, so disposing it before returning would hand the caller a
        // command wired to a disposed logger factory.
        var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.None));
        return ToolsCommandGroup.BuildToolsCommand(loggerFactory, CancellationToken.None);
    }

    /// <summary>Finds an option on <paramref name="command"/> matching any of the given names/aliases.</summary>
    private static Option? FindOption(Command command, params string[] aliases)
    {
        return command.Options.FirstOrDefault(option =>
            aliases.Any(alias => string.Equals(option.Name, alias, StringComparison.Ordinal) || option.Aliases.Contains(alias)));
    }
}

View File

@@ -122,7 +122,8 @@ public sealed class CliIntegrationTests : IDisposable
// Act & Assert
var act = async () => await client.ScanAsync("slow/image:v1");
await act.Should().ThrowAsync<TimeoutException>();
await act.Should().ThrowAsync<Exception>()
.Where(ex => ex is TimeoutException || ex is TaskCanceledException);
}
[Fact]

View File

@@ -0,0 +1,65 @@
using System.Collections.Immutable;
using StellaOps.Cli.Replay;
using Xunit;
namespace StellaOps.Cli.Tests.Replay;
/// <summary>
/// Tests covering canonical serialization, digest stability, and round-tripping
/// of <see cref="RunManifest"/> via <see cref="RunManifestSerializer"/>.
/// </summary>
public sealed class RunManifestSerializerTests
{
    [Fact]
    public void Serialize_UsesCanonicalOrdering()
    {
        var manifest = CreateManifest();

        var firstPass = RunManifestSerializer.Serialize(manifest);
        var secondPass = RunManifestSerializer.Serialize(manifest);

        Assert.Equal(firstPass, secondPass);
    }

    [Fact]
    public void ComputeDigest_IsStable()
    {
        var manifest = CreateManifest();

        var first = RunManifestSerializer.ComputeDigest(manifest);
        var second = RunManifestSerializer.ComputeDigest(manifest);

        Assert.Equal(first, second);
        // SHA-256 rendered as hex is always 64 characters.
        Assert.Equal(64, first.Length);
    }

    [Fact]
    public void RoundTrip_PreservesFields()
    {
        var original = CreateManifest();
        var serialized = RunManifestSerializer.Serialize(original);

        var roundTripped = RunManifestSerializer.Deserialize(serialized);

        Assert.Equal(serialized, RunManifestSerializer.Serialize(roundTripped));
    }

    /// <summary>Builds a fully populated manifest with fixed values for deterministic assertions.</summary>
    private static RunManifest CreateManifest() => new()
    {
        RunId = "run-1",
        SchemaVersion = "1.0.0",
        ArtifactDigests = ImmutableArray.Create(
            new ArtifactDigest("sha256", new string('a', 64), "application/vnd.oci.image.layer.v1.tar", "example")),
        SbomDigests = ImmutableArray.Create(
            new SbomReference("cyclonedx-1.6", new string('b', 64), "sbom.json")),
        FeedSnapshot = new FeedSnapshot("nvd", "2025.12.01", new string('c', 64), new DateTimeOffset(2025, 12, 1, 0, 0, 0, TimeSpan.Zero)),
        PolicySnapshot = new PolicySnapshot("policy-1", new string('d', 64), ImmutableArray.Create("rule-1")),
        ToolVersions = new ToolVersions("1.0.0", "1.0.0", "1.0.0", "1.0.0", ImmutableDictionary<string, string>.Empty),
        CryptoProfile = new CryptoProfile("default", ImmutableArray.Create("root-1"), ImmutableArray.Create("sha256")),
        EnvironmentProfile = new EnvironmentProfile("postgres-only", false, "16", null),
        PrngSeed = 42,
        CanonicalizationVersion = "1.0.0",
        InitiatedAt = new DateTimeOffset(2025, 12, 1, 0, 0, 0, TimeSpan.Zero)
    };
}

View File

@@ -548,12 +548,20 @@ public sealed class OsvGhsaParityRegressionTests
}
private static string ResolveFixturePath(string filename)
=> Path.Combine(ProjectFixtureDirectory, filename);
{
if (IsGhsaFixture(filename))
{
return Path.Combine(GhsaFixtureDirectory, filename);
}
return Path.Combine(ProjectFixtureDirectory, filename);
}
private static string NormalizeRecordedAt(string input)
=> RecordedAtRegex.Replace(input, "\"recordedAt\": \"#normalized#\"");
private static string ProjectFixtureDirectory { get; } = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "Fixtures"));
private static string GhsaFixtureDirectory { get; } = Path.GetFullPath(Path.Combine(ProjectFixtureDirectory, "..", "..", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures"));
private static string RebuildSentinelPath => Path.Combine(ProjectFixtureDirectory, ".rebuild");
@@ -568,6 +576,10 @@ public sealed class OsvGhsaParityRegressionTests
private static string? NullIfWhitespace(string? value)
=> string.IsNullOrWhiteSpace(value) ? null : value.Trim();
private static bool IsGhsaFixture(string filename)
=> filename.Contains("raw-ghsa", StringComparison.OrdinalIgnoreCase)
|| filename.Contains(".ghsa.", StringComparison.OrdinalIgnoreCase);
private sealed record MeasurementRecord(string Instrument, long Value, IReadOnlyDictionary<string, object?> Tags);
}

View File

@@ -0,0 +1,83 @@
using System.CommandLine;
namespace Scheduler.Backfill;
/// <summary>
/// Command-line entry point for the scheduler graph-job backfill tool.
/// </summary>
public static class BackfillApp
{
    /// <summary>
    /// Parses <paramref name="args"/> and runs the backfill.
    /// </summary>
    /// <param name="args">Raw command-line arguments.</param>
    /// <returns>Process exit code: 0 on success, 1 on validation or runtime failure.</returns>
    public static async Task<int> RunAsync(string[] args)
    {
        var pgOption = new Option<string?>("--pg")
        {
            Description = "PostgreSQL connection string (falls back to POSTGRES_CONNECTION_STRING)."
        };
        var batchOption = new Option<int>("--batch")
        {
            Description = "Batch size for inserts (min 50).",
            DefaultValueFactory = _ => 500
        };
        var sourceOption = new Option<FileInfo>("--source")
        {
            Description = "Path to NDJSON file containing GraphBuildJob payloads.",
            Required = true
        };
        var dryRunOption = new Option<bool>("--dry-run")
        {
            Description = "Validate and report without inserting rows."
        };
        var timeoutOption = new Option<int>("--timeout-seconds")
        {
            Description = "Cancel the backfill after the given number of seconds (0 disables).",
            DefaultValueFactory = _ => 0
        };

        var command = new RootCommand("Scheduler graph job backfill tool");
        command.Add(pgOption);
        command.Add(batchOption);
        command.Add(sourceOption);
        command.Add(dryRunOption);
        command.Add(timeoutOption);

        command.SetAction(async (parseResult, cancellationToken) =>
        {
            try
            {
                // Defensive: the option is marked Required, but guard anyway.
                var source = parseResult.GetValue(sourceOption);
                if (source is null)
                {
                    Console.Error.WriteLine("[FAIL] --source is required.");
                    return 1;
                }

                var options = BackfillOptions.From(
                    parseResult.GetValue(pgOption),
                    parseResult.GetValue(batchOption),
                    parseResult.GetValue(dryRunOption),
                    source.FullName,
                    parseResult.GetValue(timeoutOption));

                // A linked source lets the caller's token and the optional
                // timeout share one cancellation path.
                using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
                if (options.Timeout is { } timeout)
                {
                    cts.CancelAfter(timeout);
                }

                var runner = new BackfillRunner(options, Console.WriteLine);
                await runner.RunAsync(cts.Token).ConfigureAwait(false);
                return 0;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"[FAIL] {ex.Message}");
                return 1;
            }
        });

        return await command.Parse(args).InvokeAsync().ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,148 @@
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Persistence.Postgres;
using StellaOps.Scheduler.Persistence.Postgres.Repositories;
namespace Scheduler.Backfill;
/// <summary>
/// Immutable, validated configuration for a backfill run.
/// </summary>
public sealed record BackfillOptions(
    string PostgresConnectionString,
    int BatchSize,
    bool DryRun,
    string SourcePath,
    TimeSpan? Timeout)
{
    /// <summary>
    /// Normalizes raw CLI input: the connection string falls back to the
    /// POSTGRES_CONNECTION_STRING environment variable, the batch size is clamped
    /// to at least 50, and a non-positive timeout disables the deadline.
    /// </summary>
    public static BackfillOptions From(string? pgConn, int batchSize, bool dryRun, string sourcePath, int timeoutSeconds)
    {
        var connection = pgConn;
        if (string.IsNullOrWhiteSpace(connection))
        {
            connection = Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING");
        }

        if (string.IsNullOrWhiteSpace(connection))
        {
            throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)");
        }

        if (string.IsNullOrWhiteSpace(sourcePath))
        {
            throw new ArgumentException("Source file path is required (--source)");
        }

        var clampedBatch = batchSize < 50 ? 50 : batchSize;
        TimeSpan? timeout = timeoutSeconds > 0 ? TimeSpan.FromSeconds(timeoutSeconds) : null;
        return new BackfillOptions(connection, clampedBatch, dryRun, sourcePath, timeout);
    }
}
/// <summary>
/// Streams GraphBuildJob records from an NDJSON file and inserts them into the
/// scheduler Postgres store in batches. In dry-run mode the file is fully parsed
/// and validated but nothing is written.
/// </summary>
public sealed class BackfillRunner
{
    private readonly BackfillOptions _options;
    private readonly Action<string> _log;
    private readonly SchedulerDataSource _dataSource;
    private readonly IGraphJobRepository _graphJobRepository;

    /// <summary>Creates a runner for <paramref name="options"/>; <paramref name="log"/> defaults to a no-op sink.</summary>
    public BackfillRunner(BackfillOptions options, Action<string>? log = null)
    {
        _options = options;
        _log = log ?? (_ => { });
        // AutoMigrate is intentionally off: a backfill must never mutate schema.
        _dataSource = new SchedulerDataSource(Options.Create(new PostgresOptions
        {
            ConnectionString = options.PostgresConnectionString,
            SchemaName = "scheduler",
            CommandTimeoutSeconds = 30,
            AutoMigrate = false
        }), NullLogger<SchedulerDataSource>.Instance);
        _graphJobRepository = new GraphJobRepository(_dataSource);
    }

    /// <summary>
    /// Reads the source file line by line, accumulating jobs into batches of
    /// <c>BatchSize</c> and flushing each full batch (plus the trailing partial one).
    /// </summary>
    /// <exception cref="FileNotFoundException">The configured source file does not exist.</exception>
    /// <exception cref="InvalidOperationException">A line could not be parsed as a GraphBuildJob.</exception>
    public async Task RunAsync(CancellationToken cancellationToken)
    {
        if (!File.Exists(_options.SourcePath))
        {
            throw new FileNotFoundException($"Source file '{_options.SourcePath}' does not exist.", _options.SourcePath);
        }
        _log($"Graph job backfill starting (dry-run={_options.DryRun}, batch={_options.BatchSize}).");
        var batch = new List<GraphBuildJob>(_options.BatchSize);
        var total = 0;
        var inserted = 0;
        await foreach (var job in ReadJobsAsync(_options.SourcePath, cancellationToken))
        {
            batch.Add(job);
            total++;
            if (batch.Count >= _options.BatchSize)
            {
                inserted += await ProcessBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                batch.Clear();
            }
        }
        // Flush the trailing partial batch.
        if (batch.Count > 0)
        {
            inserted += await ProcessBatchAsync(batch, cancellationToken).ConfigureAwait(false);
        }
        _log($"Backfill completed. Jobs processed: {total}. Jobs inserted: {inserted}.");
    }

    // Inserts one batch (or only reports it in dry-run mode); returns the number inserted.
    private async Task<int> ProcessBatchAsync(List<GraphBuildJob> batch, CancellationToken cancellationToken)
    {
        if (_options.DryRun)
        {
            _log($"Dry run: would insert {batch.Count} jobs.");
            return 0;
        }
        foreach (var job in batch)
        {
            await _graphJobRepository.InsertAsync(job, cancellationToken).ConfigureAwait(false);
        }
        _log($"Inserted {batch.Count} jobs.");
        return batch.Count;
    }

    // Lazily yields one GraphBuildJob per non-blank NDJSON line; line numbers are 1-based for error reporting.
    private static async IAsyncEnumerable<GraphBuildJob> ReadJobsAsync(string path, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
    {
        using var stream = File.OpenRead(path);
        using var reader = new StreamReader(stream);
        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                break;
            }
            lineNumber++;
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }
            GraphBuildJob job;
            try
            {
                job = CanonicalJsonSerializer.Deserialize<GraphBuildJob>(line);
            }
            catch (JsonException ex)
            {
                // Fix: preserve the JsonException as InnerException so callers keep
                // the original parse position and stack instead of only the message.
                throw new InvalidOperationException($"Failed to parse GraphBuildJob on line {lineNumber}: {ex.Message}", ex);
            }
            yield return job;
        }
    }
}

View File

@@ -1,130 +1,3 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Npgsql;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Persistence.Postgres;
using StellaOps.Scheduler.Persistence.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres.Options;
// Legacy hand-rolled CLI entry (superseded by BackfillApp): parse args, build
// options (env-var fallback inside From), run the backfill, exit 0.
var parsed = ParseArgs(args);
var options = BackfillOptions.From(parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
var runner = new BackfillRunner(options);
await runner.RunAsync();
return 0;
// Minimal flag parser: --pg/-p take a value, --batch takes an int (default 500
// on parse failure), --dry-run is a switch; unknown tokens are ignored.
static BackfillCliOptions ParseArgs(string[] args)
{
    string? pg = null;
    var batch = 500;
    var dryRun = false;

    for (var i = 0; i < args.Length; i++)
    {
        var arg = args[i];
        if (arg is "--pg" or "-p")
        {
            pg = NextValue(args, ref i);
        }
        else if (arg == "--batch")
        {
            batch = int.TryParse(NextValue(args, ref i), out var parsedBatch) ? parsedBatch : 500;
        }
        else if (arg == "--dry-run")
        {
            dryRun = true;
        }
    }

    return new BackfillCliOptions(pg, batch, dryRun);
}
// Returns the argument following index (advancing index past it), or "" when at the end.
static string NextValue(string[] args, ref int index)
{
    var next = index + 1;
    if (next >= args.Length)
    {
        return string.Empty;
    }

    index = next;
    return args[index];
}
// Raw, unvalidated CLI values as parsed from argv (validation happens in BackfillOptions.From).
internal sealed record BackfillCliOptions(
    string? PostgresConnection,
    int BatchSize,
    bool DryRun);
// Validated configuration for the legacy backfill runner.
internal sealed record BackfillOptions(
    string PostgresConnectionString,
    int BatchSize,
    bool DryRun)
{
    // Falls back to POSTGRES_CONNECTION_STRING and clamps the batch size to >= 50.
    public static BackfillOptions From(string? pgConn, int batchSize, bool dryRun)
    {
        var connection = pgConn;
        if (string.IsNullOrWhiteSpace(connection))
        {
            connection = Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING");
        }

        if (string.IsNullOrWhiteSpace(connection))
        {
            throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)");
        }

        var clampedBatch = batchSize < 50 ? 50 : batchSize;
        return new BackfillOptions(connection, clampedBatch, dryRun);
    }
}
// Legacy placeholder runner: validates Postgres wiring by inserting one sample job.
// Superseded by the NDJSON-driven BackfillRunner in Scheduler.Backfill.
internal sealed class BackfillRunner
{
    private readonly BackfillOptions _options;
    // NOTE(review): _pg is created but never used in this class — candidate for removal.
    private readonly NpgsqlDataSource _pg;
    private readonly SchedulerDataSource _dataSource;
    private readonly IGraphJobRepository _graphJobRepository;
    public BackfillRunner(BackfillOptions options)
    {
        _options = options;
        _pg = NpgsqlDataSource.Create(options.PostgresConnectionString);
        _dataSource = new SchedulerDataSource(Options.Create(new PostgresOptions
        {
            ConnectionString = options.PostgresConnectionString,
            SchemaName = "scheduler",
            CommandTimeoutSeconds = 30,
            AutoMigrate = false
        }), NullLogger<SchedulerDataSource>.Instance);
        _graphJobRepository = new GraphJobRepository(_dataSource);
    }
    // Dry run exits before touching the database; otherwise inserts one dummy job inside a transaction.
    public async Task RunAsync()
    {
        Console.WriteLine($"Postgres graph job backfill starting (dry-run={_options.DryRun})");
        // Placeholder: actual copy logic would map legacy export to new Postgres graph_jobs rows.
        if (_options.DryRun)
        {
            Console.WriteLine("Dry run: no changes applied.");
            return;
        }
        await using var conn = await _dataSource.OpenSystemConnectionAsync(CancellationToken.None);
        await using var tx = await conn.BeginTransactionAsync();
        // Example: seed an empty job to validate wiring
        var sample = new GraphBuildJob(
            id: Guid.NewGuid().ToString(),
            tenantId: "tenant",
            sbomId: "sbom",
            sbomVersionId: "sbom-ver",
            sbomDigest: "sha256:dummy",
            status: GraphJobStatus.Pending,
            trigger: GraphBuildJobTrigger.Manual,
            createdAt: DateTimeOffset.UtcNow);
        await _graphJobRepository.InsertAsync(sample, CancellationToken.None);
        await tx.CommitAsync();
        Console.WriteLine("Backfill completed (sample insert).");
    }
}
return await BackfillApp.RunAsync(args);

View File

@@ -14,7 +14,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Npgsql" />
<PackageReference Include="System.CommandLine" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,61 @@
using System;
using System.IO;
using System.Threading.Tasks;
using FluentAssertions;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
using Xunit;
namespace StellaOps.Scheduler.Backfill.Tests;
/// <summary>
/// Tests for <see cref="BackfillOptions"/> normalization and the dry-run parse path
/// of <see cref="BackfillRunner"/>.
/// </summary>
public sealed class BackfillOptionsTests
{
    [Fact]
    public void From_ClampsBatchSize()
    {
        // Batch sizes below the floor of 50 are raised to it.
        var options = BackfillOptions.From(
            pgConn: "Host=localhost;Username=stella;Password=secret;Database=scheduler",
            batchSize: 10,
            dryRun: true,
            sourcePath: "jobs.ndjson",
            timeoutSeconds: 0);

        options.BatchSize.Should().Be(50);
    }

    [Fact]
    public async Task Runner_DryRun_ParsesNdjson()
    {
        // One canonical-JSON job per line is the expected NDJSON shape.
        var sampleJob = new GraphBuildJob(
            id: "job-1",
            tenantId: "tenant",
            sbomId: "sbom",
            sbomVersionId: "sbom-ver",
            sbomDigest: "sha256:abc",
            status: GraphJobStatus.Pending,
            trigger: GraphBuildJobTrigger.Manual,
            createdAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));

        var sourceFile = Path.GetTempFileName();
        await File.WriteAllTextAsync(sourceFile, CanonicalJsonSerializer.Serialize(sampleJob) + Environment.NewLine);

        try
        {
            // Dry run must parse the file without writing to Postgres.
            var options = new BackfillOptions(
                PostgresConnectionString: "Host=localhost;Username=stella;Password=secret;Database=scheduler",
                BatchSize: 50,
                DryRun: true,
                SourcePath: sourceFile,
                Timeout: null);

            await new BackfillRunner(options).RunAsync(default);
        }
        finally
        {
            File.Delete(sourceFile);
        }
    }
}

View File

@@ -8,6 +8,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.CommandLine" />
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" />
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" />
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" />

View File

@@ -0,0 +1,96 @@
using System.CommandLine;
namespace StellaOps.Tools.FixtureUpdater;
/// <summary>
/// CLI entry for the fixture updater: resolves the three fixture directories
/// (explicit flags win over repo-root-relative defaults) and invokes
/// <see cref="FixtureUpdaterRunner"/>.
/// </summary>
public static class FixtureUpdaterApp
{
    /// <summary>
    /// Parses <paramref name="args"/> and runs the updater.
    /// Exit codes: 0 = success, 1 = fixture errors, 2 = paths could not be resolved.
    /// </summary>
    public static async Task<int> RunAsync(string[] args)
    {
        var repoRootOption = new Option<DirectoryInfo?>("--repo-root")
        {
            Description = "Repository root used to resolve default fixture paths."
        };
        var osvFixturesOption = new Option<DirectoryInfo?>("--osv-fixtures")
        {
            Description = "Directory containing OSV fixtures (raw and snapshot outputs)."
        };
        var ghsaFixturesOption = new Option<DirectoryInfo?>("--ghsa-fixtures")
        {
            Description = "Directory containing GHSA fixtures (raw and snapshot outputs)."
        };
        var nvdFixturesOption = new Option<DirectoryInfo?>("--nvd-fixtures")
        {
            Description = "Directory containing NVD fixtures (snapshot outputs)."
        };
        var fixedTimeOption = new Option<DateTimeOffset>("--fixed-time")
        {
            Description = "Fixed timestamp used for deterministic fixture generation.",
            DefaultValueFactory = _ => FixtureUpdaterDefaults.DefaultFixedTime
        };
        var command = new RootCommand("Rewrites Concelier OSV/GHSA/NVD fixtures deterministically.");
        command.Add(repoRootOption);
        command.Add(osvFixturesOption);
        command.Add(ghsaFixturesOption);
        command.Add(nvdFixturesOption);
        command.Add(fixedTimeOption);
        command.SetAction((parseResult, _) =>
        {
            var repoRoot = parseResult.GetValue(repoRootOption);
            var osvFixtures = parseResult.GetValue(osvFixturesOption);
            var ghsaFixtures = parseResult.GetValue(ghsaFixturesOption);
            var nvdFixtures = parseResult.GetValue(nvdFixturesOption);
            var fixedTime = parseResult.GetValue(fixedTimeOption);
            var resolvedRepoRoot = RepoRootLocator.TryResolve(repoRoot?.FullName);
            // Without a repo root we can still run when every fixture path was given explicitly.
            if (resolvedRepoRoot is null && (osvFixtures is null || ghsaFixtures is null || nvdFixtures is null))
            {
                Console.Error.WriteLine("[FixtureUpdater] Unable to resolve repo root. Provide --repo-root or explicit fixture paths.");
                return Task.FromResult(2);
            }
            var resolvedOsv = ResolvePath(osvFixtures?.FullName, resolvedRepoRoot, FixtureUpdaterDefaults.OsvFixturesRelative);
            var resolvedGhsa = ResolvePath(ghsaFixtures?.FullName, resolvedRepoRoot, FixtureUpdaterDefaults.GhsaFixturesRelative);
            var resolvedNvd = ResolvePath(nvdFixtures?.FullName, resolvedRepoRoot, FixtureUpdaterDefaults.NvdFixturesRelative);
            if (resolvedOsv is null || resolvedGhsa is null || resolvedNvd is null)
            {
                Console.Error.WriteLine("[FixtureUpdater] Fixture paths could not be resolved. Provide --osv-fixtures, --ghsa-fixtures, and --nvd-fixtures explicitly.");
                return Task.FromResult(2);
            }
            var options = new FixtureUpdaterOptions(
                resolvedRepoRoot,
                resolvedOsv,
                resolvedGhsa,
                resolvedNvd,
                fixedTime);
            var runner = new FixtureUpdaterRunner(options, Console.WriteLine, Console.Error.WriteLine);
            var result = runner.Run();
            return Task.FromResult(result.ErrorCount == 0 ? 0 : 1);
        });
        return await command.Parse(args).InvokeAsync().ConfigureAwait(false);
    }

    // Explicit override wins; otherwise resolve relative to the repo root (null when neither is available).
    private static string? ResolvePath(string? overridePath, string? repoRoot, string relativePath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }
        if (string.IsNullOrWhiteSpace(repoRoot))
        {
            return null;
        }
        return Path.GetFullPath(Path.Combine(repoRoot, relativePath));
    }
}

View File

@@ -0,0 +1,532 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Ghsa;
using StellaOps.Concelier.Connector.Ghsa.Internal;
using StellaOps.Concelier.Connector.Nvd;
using StellaOps.Concelier.Connector.Osv;
using StellaOps.Concelier.Connector.Osv.Internal;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage;
namespace StellaOps.Tools.FixtureUpdater;
// Resolved inputs for a fixture-update run; all fixture paths are absolute.
public sealed record FixtureUpdaterOptions(
    string? RepoRoot,
    string OsvFixturesPath,
    string GhsaFixturesPath,
    string NvdFixturesPath,
    DateTimeOffset FixedTime);
// Outcome of a run; ErrorCount == 0 means every fixture was rewritten cleanly.
public readonly record struct FixtureUpdateResult(int ErrorCount);
/// <summary>
/// Rewrites Concelier OSV/GHSA/NVD test fixtures deterministically: GUIDs and
/// timestamps are derived from the configured fixed time so regenerated
/// snapshots are byte-stable.
/// </summary>
public sealed class FixtureUpdaterRunner
{
    private readonly FixtureUpdaterOptions _options;
    private readonly Action<string> _info;   // informational sink (defaults to no-op)
    private readonly Action<string> _error;  // error sink (defaults to no-op)
    private readonly FixtureDeterminism _determinism;
    private readonly JsonSerializerOptions _serializerOptions;
    private int _errors; // running error count for the current Run() invocation
    public FixtureUpdaterRunner(FixtureUpdaterOptions options, Action<string>? info = null, Action<string>? error = null)
    {
        _options = options;
        _info = info ?? (_ => { });
        _error = error ?? (_ => { });
        _determinism = new FixtureDeterminism(options.FixedTime);
        // Web defaults (camelCase) + omit nulls: matches the connector DTO wire format.
        _serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
        {
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };
    }
    /// <summary>
    /// Runs all fixture rewrites in a fixed order and returns the error count.
    /// Target directories are created up front so a missing path fails per-fixture.
    /// </summary>
    public FixtureUpdateResult Run()
    {
        _errors = 0; // reset so the runner can be invoked more than once
        Directory.CreateDirectory(_options.OsvFixturesPath);
        Directory.CreateDirectory(_options.GhsaFixturesPath);
        Directory.CreateDirectory(_options.NvdFixturesPath);
        RewriteOsvFixtures(_options.OsvFixturesPath);
        RewriteSnapshotFixtures(_options.OsvFixturesPath);
        RewriteGhsaFixtures(_options.GhsaFixturesPath);
        RewriteCreditParityFixtures(_options.GhsaFixturesPath, _options.NvdFixturesPath);
        return new FixtureUpdateResult(_errors);
    }
    /// <summary>
    /// Maps the raw OSV fixture array (osv-ghsa.raw-osv.json) through the real OSV
    /// mapper and writes the sorted advisory snapshot (osv-ghsa.osv.json).
    /// Per-entry parse failures are reported and skipped.
    /// </summary>
    private void RewriteOsvFixtures(string fixturesPath)
    {
        var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-osv.json");
        if (!File.Exists(rawPath))
        {
            ReportError($"[FixtureUpdater] OSV raw fixture missing: {rawPath}");
            return;
        }
        JsonDocument document;
        try
        {
            document = JsonDocument.Parse(File.ReadAllText(rawPath));
        }
        catch (JsonException ex)
        {
            ReportError($"[FixtureUpdater] Failed to parse OSV raw fixture '{rawPath}': {ex.Message}");
            return;
        }
        using (document)
        {
            if (document.RootElement.ValueKind != JsonValueKind.Array)
            {
                ReportError($"[FixtureUpdater] OSV raw fixture '{rawPath}' is not a JSON array.");
                return;
            }
            var advisories = new List<Advisory>();
            var index = 0; // 1-based entry counter for error messages
            foreach (var element in document.RootElement.EnumerateArray())
            {
                index++;
                OsvVulnerabilityDto? dto;
                try
                {
                    dto = JsonSerializer.Deserialize<OsvVulnerabilityDto>(element.GetRawText(), _serializerOptions);
                }
                catch (JsonException ex)
                {
                    ReportError($"[FixtureUpdater] OSV entry {index} parse failed in '{rawPath}': {ex.Message}");
                    continue;
                }
                if (dto is null)
                {
                    ReportError($"[FixtureUpdater] OSV entry {index} was empty in '{rawPath}'.");
                    continue;
                }
                var identifier = dto.Id ?? $"osv-entry-{index}";
                var ecosystem = dto.Affected?.FirstOrDefault()?.Package?.Ecosystem ?? "unknown";
                // Prefer the advisory's own timestamps; fall back to the fixed time for determinism.
                var capturedAt = dto.Modified ?? dto.Published ?? _determinism.UtcNow;
                var uri = new Uri($"https://osv.dev/vulnerability/{identifier}");
                var documentRecord = new DocumentRecord(
                    _determinism.CreateGuid("osv-document", identifier),
                    OsvConnectorPlugin.SourceName,
                    uri.ToString(),
                    capturedAt,
                    "fixture-sha",
                    DocumentStatuses.PendingMap,
                    "application/json",
                    null,
                    new Dictionary<string, string>(StringComparer.Ordinal)
                    {
                        ["osv.ecosystem"] = ecosystem,
                    },
                    null,
                    capturedAt,
                    null,
                    null);
                var payload = DocumentObject.Parse(element.GetRawText());
                var dtoRecord = new DtoRecord(
                    _determinism.CreateGuid("osv-dto", identifier),
                    documentRecord.Id,
                    OsvConnectorPlugin.SourceName,
                    "osv.v1",
                    payload,
                    capturedAt);
                var advisory = OsvMapper.Map(dto, documentRecord, dtoRecord, ecosystem);
                advisories.Add(advisory);
            }
            // Ordinal sort keeps snapshot ordering stable across runs.
            advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
            var snapshot = SnapshotSerializer.ToSnapshot(advisories);
            var outputPath = Path.Combine(fixturesPath, "osv-ghsa.osv.json");
            File.WriteAllText(outputPath, snapshot);
            _info($"[FixtureUpdater] Updated {outputPath}");
        }
    }
    /// <summary>
    /// Synthesizes one representative OSV advisory per ecosystem (npm, PyPI) from
    /// hard-coded baseline data and writes the mapped per-ecosystem snapshot files.
    /// </summary>
    private void RewriteSnapshotFixtures(string fixturesPath)
    {
        // Fixed baseline instants keep the generated snapshots byte-stable.
        var baselinePublished = new DateTimeOffset(2025, 1, 5, 12, 0, 0, TimeSpan.Zero);
        var baselineModified = new DateTimeOffset(2025, 1, 8, 6, 30, 0, TimeSpan.Zero);
        var baselineFetched = new DateTimeOffset(2025, 1, 8, 7, 0, 0, TimeSpan.Zero);
        var cases = new (string Ecosystem, string Purl, string PackageName, string SnapshotFile)[]
        {
            ("npm", "pkg:npm/%40scope%2Fleft-pad", "@scope/left-pad", "osv-npm.snapshot.json"),
            ("PyPI", "pkg:pypi/requests", "requests", "osv-pypi.snapshot.json"),
        };
        foreach (var (ecosystem, purl, packageName, snapshotFile) in cases)
        {
            var dto = new OsvVulnerabilityDto
            {
                Id = $"OSV-2025-{ecosystem}-0001",
                Summary = $"{ecosystem} package vulnerability",
                Details = $"Detailed description for {ecosystem} package {packageName}.",
                Published = baselinePublished,
                Modified = baselineModified,
                Aliases = new[] { $"CVE-2025-11{ecosystem.Length}", $"GHSA-{ecosystem.Length}abc-{ecosystem.Length}def-{ecosystem.Length}ghi" },
                Related = new[] { $"OSV-RELATED-{ecosystem}-42" },
                References = new[]
                {
                    new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/advisory", Type = "ADVISORY" },
                    new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/fix", Type = "FIX" },
                },
                Severity = new[]
                {
                    new OsvSeverityDto { Type = "CVSS_V3", Score = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
                },
                Affected = new[]
                {
                    new OsvAffectedPackageDto
                    {
                        Package = new OsvPackageDto
                        {
                            Ecosystem = ecosystem,
                            Name = packageName,
                            Purl = purl,
                        },
                        Ranges = new[]
                        {
                            new OsvRangeDto
                            {
                                Type = "SEMVER",
                                Events = new[]
                                {
                                    new OsvEventDto { Introduced = "0" },
                                    new OsvEventDto { Fixed = "2.0.0" },
                                },
                            },
                        },
                        Versions = new[] { "1.0.0", "1.5.0" },
                        EcosystemSpecific = JsonDocument.Parse("{\"severity\":\"high\"}").RootElement.Clone(),
                    },
                },
                DatabaseSpecific = JsonDocument.Parse("{\"source\":\"osv.dev\"}").RootElement.Clone(),
            };
            var identifier = dto.Id ?? $"snapshot-{ecosystem}";
            var document = new DocumentRecord(
                _determinism.CreateGuid("osv-snapshot-document", identifier),
                OsvConnectorPlugin.SourceName,
                $"https://osv.dev/vulnerability/{dto.Id}",
                baselineFetched,
                "fixture-sha",
                DocumentStatuses.PendingParse,
                "application/json",
                null,
                new Dictionary<string, string>(StringComparer.Ordinal) { ["osv.ecosystem"] = ecosystem },
                null,
                baselineModified,
                null);
            var payload = DocumentObject.Parse(JsonSerializer.Serialize(dto, _serializerOptions));
            var dtoRecord = new DtoRecord(
                _determinism.CreateGuid("osv-snapshot-dto", identifier),
                document.Id,
                OsvConnectorPlugin.SourceName,
                "osv.v1",
                payload,
                baselineModified);
            var advisory = OsvMapper.Map(dto, document, dtoRecord, ecosystem);
            var snapshot = SnapshotSerializer.ToSnapshot(advisory);
            var outputPath = Path.Combine(fixturesPath, snapshotFile);
            File.WriteAllText(outputPath, snapshot);
            _info($"[FixtureUpdater] Updated {outputPath}");
        }
    }
    /// <summary>
    /// Maps the raw GHSA fixture array (osv-ghsa.raw-ghsa.json) through the real
    /// GHSA parser/mapper and writes the sorted snapshot (osv-ghsa.ghsa.json).
    /// Per-entry parse failures are reported and skipped.
    /// </summary>
    private void RewriteGhsaFixtures(string fixturesPath)
    {
        var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-ghsa.json");
        if (!File.Exists(rawPath))
        {
            ReportError($"[FixtureUpdater] GHSA raw fixture missing: {rawPath}");
            return;
        }
        JsonDocument document;
        try
        {
            document = JsonDocument.Parse(File.ReadAllText(rawPath));
        }
        catch (JsonException ex)
        {
            ReportError($"[FixtureUpdater] Failed to parse GHSA raw fixture '{rawPath}': {ex.Message}");
            return;
        }
        using (document)
        {
            if (document.RootElement.ValueKind != JsonValueKind.Array)
            {
                ReportError($"[FixtureUpdater] GHSA raw fixture '{rawPath}' is not a JSON array.");
                return;
            }
            var advisories = new List<Advisory>();
            var index = 0; // 1-based entry counter for error messages
            foreach (var element in document.RootElement.EnumerateArray())
            {
                index++;
                GhsaRecordDto dto;
                try
                {
                    // GhsaRecordParser works on raw UTF-8 bytes rather than a DOM element.
                    dto = GhsaRecordParser.Parse(Encoding.UTF8.GetBytes(element.GetRawText()));
                }
                catch (JsonException ex)
                {
                    ReportError($"[FixtureUpdater] GHSA entry {index} parse failed in '{rawPath}': {ex.Message}");
                    continue;
                }
                var identifier = string.IsNullOrWhiteSpace(dto.GhsaId) ? $"ghsa-entry-{index}" : dto.GhsaId;
                var capturedAt = _determinism.UtcNow;
                var uri = new Uri($"https://github.com/advisories/{identifier}");
                var documentRecord = new DocumentRecord(
                    _determinism.CreateGuid("ghsa-document", identifier),
                    GhsaConnectorPlugin.SourceName,
                    uri.ToString(),
                    capturedAt,
                    "fixture-sha",
                    DocumentStatuses.PendingMap,
                    "application/json",
                    null,
                    new Dictionary<string, string>(StringComparer.Ordinal),
                    null,
                    capturedAt,
                    null,
                    null);
                var advisory = GhsaMapper.Map(dto, documentRecord, capturedAt);
                advisories.Add(advisory);
            }
            // Ordinal sort keeps snapshot ordering stable across runs.
            advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
            var snapshot = SnapshotSerializer.ToSnapshot(advisories);
            var outputPath = Path.Combine(fixturesPath, "osv-ghsa.ghsa.json");
            File.WriteAllText(outputPath, snapshot);
            _info($"[FixtureUpdater] Updated {outputPath}");
        }
    }
    /// <summary>
    /// Builds three equivalent advisories (GHSA, OSV, NVD sources) with identical
    /// credits and writes each snapshot into BOTH fixture directories so the
    /// credit-parity regression tests can diff them from either side.
    /// </summary>
    private void RewriteCreditParityFixtures(string ghsaFixturesPath, string nvdFixturesPath)
    {
        Directory.CreateDirectory(ghsaFixturesPath);
        Directory.CreateDirectory(nvdFixturesPath);
        var advisoryKeyGhsa = "GHSA-credit-parity";
        var advisoryKeyNvd = "CVE-2025-5555";
        // Fixed instants keep the generated provenance byte-stable.
        var recordedAt = new DateTimeOffset(2025, 10, 10, 15, 0, 0, TimeSpan.Zero);
        var published = new DateTimeOffset(2025, 10, 9, 18, 30, 0, TimeSpan.Zero);
        var modified = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero);
        // Same two credits are emitted for every source so parity checks line up.
        AdvisoryCredit[] CreateCredits(string source) =>
        [
            CreateCredit("Alice Researcher", "reporter", new[] { "mailto:alice.researcher@example.com" }, source),
            CreateCredit("Bob Maintainer", "remediation_developer", new[] { "https://github.com/acme/bob-maintainer" }, source)
        ];
        AdvisoryCredit CreateCredit(string displayName, string role, IReadOnlyList<string> contacts, string source)
        {
            var provenance = new AdvisoryProvenance(
                source,
                "credit",
                $"{source}:{displayName.ToLowerInvariant().Replace(' ', '-')}",
                recordedAt,
                new[] { ProvenanceFieldMasks.Credits });
            return new AdvisoryCredit(displayName, role, contacts, provenance);
        }
        AdvisoryReference[] CreateReferences(string sourceName, params (string Url, string Kind)[] entries)
        {
            if (entries is null || entries.Length == 0)
            {
                return Array.Empty<AdvisoryReference>();
            }
            var references = new List<AdvisoryReference>(entries.Length);
            foreach (var entry in entries)
            {
                var provenance = new AdvisoryProvenance(
                    sourceName,
                    "reference",
                    entry.Url,
                    recordedAt,
                    new[] { ProvenanceFieldMasks.References });
                references.Add(new AdvisoryReference(
                    entry.Url,
                    entry.Kind,
                    sourceTag: null,
                    summary: null,
                    provenance));
            }
            return references.ToArray();
        }
        // Shared advisory skeleton; only source-specific keys/credits/references vary.
        Advisory CreateAdvisory(
            string sourceName,
            string advisoryKey,
            IEnumerable<string> aliases,
            AdvisoryCredit[] credits,
            AdvisoryReference[] references,
            string documentValue)
        {
            var documentProvenance = new AdvisoryProvenance(
                sourceName,
                "document",
                documentValue,
                recordedAt,
                new[] { ProvenanceFieldMasks.Advisory });
            var mappingProvenance = new AdvisoryProvenance(
                sourceName,
                "mapping",
                advisoryKey,
                recordedAt,
                new[] { ProvenanceFieldMasks.Advisory });
            return new Advisory(
                advisoryKey,
                "Credit parity regression fixture",
                "Credit parity regression fixture",
                "en",
                published,
                modified,
                "moderate",
                exploitKnown: false,
                aliases,
                credits,
                references,
                Array.Empty<AffectedPackage>(),
                Array.Empty<CvssMetric>(),
                new[] { documentProvenance, mappingProvenance });
        }
        var ghsa = CreateAdvisory(
            "ghsa",
            advisoryKeyGhsa,
            new[] { advisoryKeyGhsa, advisoryKeyNvd },
            CreateCredits("ghsa"),
            CreateReferences(
                "ghsa",
                ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
                ("https://example.com/ghsa/patch", "patch")),
            $"security/advisories/{advisoryKeyGhsa}");
        var osv = CreateAdvisory(
            OsvConnectorPlugin.SourceName,
            advisoryKeyGhsa,
            new[] { advisoryKeyGhsa, advisoryKeyNvd },
            CreateCredits(OsvConnectorPlugin.SourceName),
            CreateReferences(
                OsvConnectorPlugin.SourceName,
                ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
                ($"https://osv.dev/vulnerability/{advisoryKeyGhsa}", "advisory")),
            $"https://osv.dev/vulnerability/{advisoryKeyGhsa}");
        var nvd = CreateAdvisory(
            NvdConnectorPlugin.SourceName,
            advisoryKeyNvd,
            new[] { advisoryKeyNvd, advisoryKeyGhsa },
            CreateCredits(NvdConnectorPlugin.SourceName),
            CreateReferences(
                NvdConnectorPlugin.SourceName,
                ($"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}", "advisory"),
                ("https://example.com/nvd/reference", "report")),
            $"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}");
        var ghsaSnapshot = SnapshotSerializer.ToSnapshot(ghsa);
        var osvSnapshot = SnapshotSerializer.ToSnapshot(osv);
        var nvdSnapshot = SnapshotSerializer.ToSnapshot(nvd);
        File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
        File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.osv.json"), osvSnapshot);
        File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);
        File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
        File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.osv.json"), osvSnapshot);
        File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);
        _info($"[FixtureUpdater] Updated credit parity fixtures under {ghsaFixturesPath} and {nvdFixturesPath}");
    }
    // Records one error for the current run and forwards the message to the error sink.
    private void ReportError(string message)
    {
        _errors++;
        _error(message);
    }
}
/// <summary>
/// Supplies a fixed timestamp and deterministic, content-derived GUIDs so
/// regenerated fixtures are byte-identical across runs and machines.
/// </summary>
internal sealed class FixtureDeterminism
{
    private readonly DateTimeOffset _fixedTime;

    public FixtureDeterminism(DateTimeOffset fixedTime)
    {
        _fixedTime = fixedTime;
    }

    // Always the injected fixed instant — never the wall clock.
    public DateTimeOffset UtcNow => _fixedTime;

    /// <summary>Same (scope, key) pair always yields the same GUID.</summary>
    public Guid CreateGuid(string scope, string key)
        => CreateDeterministicGuid($"{scope}:{key}");

    private static Guid CreateDeterministicGuid(string value)
    {
        // SHA256.HashData avoids allocating and disposing a hasher per call;
        // output is identical to hashing via SHA256.Create().
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(value));
        Span<byte> bytes = stackalloc byte[16];
        hash.AsSpan(0, 16).CopyTo(bytes);
        // Stamp RFC 4122 version (name-based, v5-style) and variant bits so the
        // result is a well-formed UUID rather than raw hash bytes.
        bytes[6] = (byte)((bytes[6] & 0x0F) | 0x50);
        bytes[8] = (byte)((bytes[8] & 0x3F) | 0x80);
        return new Guid(bytes);
    }
}
/// <summary>
/// Resolves the repository root: an explicit path wins; otherwise walks upward
/// from the current directory until a directory containing src/StellaOps.sln is found.
/// </summary>
internal static class RepoRootLocator
{
    public static string? TryResolve(string? repoRoot)
    {
        if (!string.IsNullOrWhiteSpace(repoRoot))
        {
            return Path.GetFullPath(repoRoot);
        }

        for (var dir = new DirectoryInfo(Directory.GetCurrentDirectory()); dir is not null; dir = dir.Parent)
        {
            if (File.Exists(Path.Combine(dir.FullName, "src", "StellaOps.sln")))
            {
                return dir.FullName;
            }
        }

        // Reached the filesystem root without finding the solution marker.
        return null;
    }
}
// Default fixed timestamp and repo-relative fixture locations used when CLI flags are omitted.
internal static class FixtureUpdaterDefaults
{
    // Reference instant for deterministic generation when --fixed-time is not supplied.
    public static readonly DateTimeOffset DefaultFixedTime = new(2025, 1, 5, 0, 0, 0, TimeSpan.Zero);
    public static readonly string OsvFixturesRelative = Path.Combine("src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures");
    public static readonly string GhsaFixturesRelative = Path.Combine("src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures");
    public static readonly string NvdFixturesRelative = Path.Combine("src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Nvd.Tests", "Nvd", "Fixtures");
}

View File

@@ -1,377 +1,3 @@
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Connector.Ghsa;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Ghsa.Internal;
using StellaOps.Concelier.Connector.Osv.Internal;
using StellaOps.Concelier.Connector.Osv;
using StellaOps.Concelier.Connector.Nvd;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Documents;
using StellaOps.Tools.FixtureUpdater;
// Legacy single-file fixture updater (superseded by FixtureUpdaterApp/FixtureUpdaterRunner).
// Serializer settings matching the connector DTO wire format (camelCase, nulls omitted).
var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
// Resolve fixture directories relative to the build output (bin/<config>/<tfm> -> repo root).
var projectRoot = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", ".."));
var osvFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures");
var ghsaFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures");
var nvdFixturesPath = Path.Combine(projectRoot, "src", "StellaOps.Concelier.Connector.Nvd.Tests", "Nvd", "Fixtures");
RewriteOsvFixtures(osvFixturesPath);
RewriteSnapshotFixtures(osvFixturesPath);
// NOTE(review): the GHSA rewrite is pointed at the OSV fixtures path here, not
// ghsaFixturesPath — presumably intentional for the shared osv-ghsa fixtures,
// but verify (the replacement FixtureUpdaterRunner uses the GHSA path instead).
RewriteGhsaFixtures(osvFixturesPath);
RewriteCreditParityFixtures(ghsaFixturesPath, nvdFixturesPath);
return;
// Legacy variant of the OSV fixture rewrite: maps the raw array through OsvMapper
// and writes the sorted snapshot. Uses random GUIDs and wall-clock timestamps, so
// unlike FixtureUpdaterRunner.RewriteOsvFixtures its output is NOT byte-stable.
void RewriteOsvFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-osv.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] OSV raw fixture missing: {rawPath}");
        return;
    }
    using var document = JsonDocument.Parse(File.ReadAllText(rawPath));
    var advisories = new List<Advisory>();
    foreach (var element in document.RootElement.EnumerateArray())
    {
        var dto = JsonSerializer.Deserialize<OsvVulnerabilityDto>(element.GetRawText(), serializerOptions);
        if (dto is null)
        {
            continue; // silently skip unparseable entries (legacy behavior)
        }
        var ecosystem = dto.Affected?.FirstOrDefault()?.Package?.Ecosystem ?? "unknown";
        var uri = new Uri($"https://osv.dev/vulnerability/{dto.Id}");
        var documentRecord = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            uri.ToString(),
            DateTimeOffset.UtcNow,
            "fixture-sha",
            DocumentStatuses.PendingMap,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["osv.ecosystem"] = ecosystem,
            },
            null,
            DateTimeOffset.UtcNow,
            null,
            null);
        var payload = DocumentObject.Parse(element.GetRawText());
        var dtoRecord = new DtoRecord(
            Guid.NewGuid(),
            documentRecord.Id,
            OsvConnectorPlugin.SourceName,
            "osv.v1",
            payload,
            DateTimeOffset.UtcNow);
        var advisory = OsvMapper.Map(dto, documentRecord, dtoRecord, ecosystem);
        advisories.Add(advisory);
    }
    // Ordinal sort keeps snapshot ordering stable.
    advisories.Sort((left, right) => string.Compare(left.AdvisoryKey, right.AdvisoryKey, StringComparison.Ordinal));
    var snapshot = SnapshotSerializer.ToSnapshot(advisories);
    File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.osv.json"), snapshot);
    Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.osv.json")}");
}
// Regenerates per-ecosystem OSV snapshot fixtures (npm, PyPI) from fully
// synthetic DTOs. All timestamps are pinned so the serialized snapshots are
// bit-for-bit reproducible across runs.
void RewriteSnapshotFixtures(string fixturesPath)
{
    // Fixed instants: published < modified < fetched.
    var baselinePublished = new DateTimeOffset(2025, 1, 5, 12, 0, 0, TimeSpan.Zero);
    var baselineModified = new DateTimeOffset(2025, 1, 8, 6, 30, 0, TimeSpan.Zero);
    var baselineFetched = new DateTimeOffset(2025, 1, 8, 7, 0, 0, TimeSpan.Zero);

    // One tuple per ecosystem fixture; the purl for npm is percent-encoded
    // because the package is scoped (@scope/left-pad).
    var cases = new (string Ecosystem, string Purl, string PackageName, string SnapshotFile)[]
    {
        ("npm", "pkg:npm/%40scope%2Fleft-pad", "@scope/left-pad", "osv-npm.snapshot.json"),
        ("PyPI", "pkg:pypi/requests", "requests", "osv-pypi.snapshot.json"),
    };

    foreach (var (ecosystem, purl, packageName, snapshotFile) in cases)
    {
        // Build a fully populated OSV record so every mapper code path
        // (aliases, references, severity, ranges, ecosystem-specific data)
        // is exercised by the snapshot.
        var dto = new OsvVulnerabilityDto
        {
            Id = $"OSV-2025-{ecosystem}-0001",
            Summary = $"{ecosystem} package vulnerability",
            Details = $"Detailed description for {ecosystem} package {packageName}.",
            Published = baselinePublished,
            Modified = baselineModified,
            // Alias/related IDs are derived from the ecosystem name length so
            // the two cases stay distinct yet deterministic.
            Aliases = new[] { $"CVE-2025-11{ecosystem.Length}", $"GHSA-{ecosystem.Length}abc-{ecosystem.Length}def-{ecosystem.Length}ghi" },
            Related = new[] { $"OSV-RELATED-{ecosystem}-42" },
            References = new[]
            {
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/advisory", Type = "ADVISORY" },
                new OsvReferenceDto { Url = $"https://example.com/{ecosystem}/fix", Type = "FIX" },
            },
            Severity = new[]
            {
                new OsvSeverityDto { Type = "CVSS_V3", Score = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" },
            },
            Affected = new[]
            {
                new OsvAffectedPackageDto
                {
                    Package = new OsvPackageDto
                    {
                        Ecosystem = ecosystem,
                        Name = packageName,
                        Purl = purl,
                    },
                    Ranges = new[]
                    {
                        new OsvRangeDto
                        {
                            Type = "SEMVER",
                            Events = new[]
                            {
                                new OsvEventDto { Introduced = "0" },
                                new OsvEventDto { Fixed = "2.0.0" },
                            },
                        },
                    },
                    Versions = new[] { "1.0.0", "1.5.0" },
                    // Clone() detaches the element from its parent document so it
                    // survives the JsonDocument's disposal.
                    EcosystemSpecific = JsonDocument.Parse("{\"severity\":\"high\"}").RootElement.Clone(),
                },
            },
            DatabaseSpecific = JsonDocument.Parse("{\"source\":\"osv.dev\"}").RootElement.Clone(),
        };

        var document = new DocumentRecord(
            Guid.NewGuid(),
            OsvConnectorPlugin.SourceName,
            $"https://osv.dev/vulnerability/{dto.Id}",
            baselineFetched,
            "fixture-sha",
            DocumentStatuses.PendingParse,
            "application/json",
            null,
            new Dictionary<string, string>(StringComparer.Ordinal) { ["osv.ecosystem"] = ecosystem },
            null,
            baselineModified,
            null);
        var payload = DocumentObject.Parse(JsonSerializer.Serialize(dto, serializerOptions));
        var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, OsvConnectorPlugin.SourceName, "osv.v1", payload, baselineModified);

        // Map through the real connector pipeline and write the snapshot.
        var advisory = OsvMapper.Map(dto, document, dtoRecord, ecosystem);
        var snapshot = SnapshotSerializer.ToSnapshot(advisory);
        File.WriteAllText(Path.Combine(fixturesPath, snapshotFile), snapshot);
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, snapshotFile)}");
    }
}
// Regenerates the GHSA-side snapshot ("osv-ghsa.ghsa.json") from the raw GHSA
// fixture, mapping every parseable record through the GHSA connector pipeline.
void RewriteGhsaFixtures(string fixturesPath)
{
    var rawPath = Path.Combine(fixturesPath, "osv-ghsa.raw-ghsa.json");
    if (!File.Exists(rawPath))
    {
        Console.WriteLine($"[FixtureUpdater] GHSA raw fixture missing: {rawPath}");
        return;
    }

    JsonDocument rawDocument;
    try
    {
        rawDocument = JsonDocument.Parse(File.ReadAllText(rawPath));
    }
    catch (JsonException ex)
    {
        // Diagnose rather than crash on a corrupt fixture.
        Console.WriteLine($"[FixtureUpdater] Failed to parse GHSA raw fixture '{rawPath}': {ex.Message}");
        return;
    }

    using (rawDocument)
    {
        var mapped = new List<Advisory>();
        foreach (var record in rawDocument.RootElement.EnumerateArray())
        {
            GhsaRecordDto dto;
            try
            {
                dto = GhsaRecordParser.Parse(Encoding.UTF8.GetBytes(record.GetRawText()));
            }
            catch (JsonException)
            {
                // Skip individual records that fail to parse.
                continue;
            }

            var advisoryUri = new Uri($"https://github.com/advisories/{dto.GhsaId}");
            var documentRecord = new DocumentRecord(
                Guid.NewGuid(),
                GhsaConnectorPlugin.SourceName,
                advisoryUri.ToString(),
                DateTimeOffset.UtcNow,
                "fixture-sha",
                DocumentStatuses.PendingMap,
                "application/json",
                null,
                new Dictionary<string, string>(StringComparer.Ordinal),
                null,
                DateTimeOffset.UtcNow,
                null,
                null);
            mapped.Add(GhsaMapper.Map(dto, documentRecord, DateTimeOffset.UtcNow));
        }

        // Sort by advisory key (ordinal) so the snapshot is deterministic.
        mapped.Sort(static (left, right) => StringComparer.Ordinal.Compare(left.AdvisoryKey, right.AdvisoryKey));

        File.WriteAllText(Path.Combine(fixturesPath, "osv-ghsa.ghsa.json"), SnapshotSerializer.ToSnapshot(mapped));
        Console.WriteLine($"[FixtureUpdater] Updated {Path.Combine(fixturesPath, "osv-ghsa.ghsa.json")}");
    }
}
// Regenerates the "credit parity" regression fixtures: the same synthetic
// advisory expressed through three sources (GHSA, OSV, NVD), written into both
// the GHSA and NVD fixture directories so each test project sees all three.
void RewriteCreditParityFixtures(string ghsaFixturesPath, string nvdFixturesPath)
{
    Directory.CreateDirectory(ghsaFixturesPath);
    Directory.CreateDirectory(nvdFixturesPath);

    var advisoryKeyGhsa = "GHSA-credit-parity";
    var advisoryKeyNvd = "CVE-2025-5555";

    // Pinned timestamps keep the serialized fixtures deterministic.
    var recordedAt = new DateTimeOffset(2025, 10, 10, 15, 0, 0, TimeSpan.Zero);
    var published = new DateTimeOffset(2025, 10, 9, 18, 30, 0, TimeSpan.Zero);
    var modified = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero);

    // Same two credits for every source; only the provenance source differs.
    AdvisoryCredit[] CreateCredits(string source) =>
    [
        CreateCredit("Alice Researcher", "reporter", new[] { "mailto:alice.researcher@example.com" }, source),
        CreateCredit("Bob Maintainer", "remediation_developer", new[] { "https://github.com/acme/bob-maintainer" }, source)
    ];

    // Builds one credit with a provenance value of "<source>:<slugified-name>".
    AdvisoryCredit CreateCredit(string displayName, string role, IReadOnlyList<string> contacts, string source)
    {
        var provenance = new AdvisoryProvenance(
            source,
            "credit",
            $"{source}:{displayName.ToLowerInvariant().Replace(' ', '-')}",
            recordedAt,
            new[] { ProvenanceFieldMasks.Credits });
        return new AdvisoryCredit(displayName, role, contacts, provenance);
    }

    // Builds reference entries, each carrying its own URL-keyed provenance.
    AdvisoryReference[] CreateReferences(string sourceName, params (string Url, string Kind)[] entries)
    {
        if (entries is null || entries.Length == 0)
        {
            return Array.Empty<AdvisoryReference>();
        }

        var references = new List<AdvisoryReference>(entries.Length);
        foreach (var entry in entries)
        {
            var provenance = new AdvisoryProvenance(
                sourceName,
                "reference",
                entry.Url,
                recordedAt,
                new[] { ProvenanceFieldMasks.References });
            references.Add(new AdvisoryReference(
                entry.Url,
                entry.Kind,
                sourceTag: null,
                summary: null,
                provenance));
        }

        return references.ToArray();
    }

    // Assembles the advisory with both a "document" and a "mapping" provenance
    // entry; packages and CVSS metrics are intentionally empty for this fixture.
    Advisory CreateAdvisory(
        string sourceName,
        string advisoryKey,
        IEnumerable<string> aliases,
        AdvisoryCredit[] credits,
        AdvisoryReference[] references,
        string documentValue)
    {
        var documentProvenance = new AdvisoryProvenance(
            sourceName,
            "document",
            documentValue,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });
        var mappingProvenance = new AdvisoryProvenance(
            sourceName,
            "mapping",
            advisoryKey,
            recordedAt,
            new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            advisoryKey,
            "Credit parity regression fixture",
            "Credit parity regression fixture",
            "en",
            published,
            modified,
            "moderate",
            exploitKnown: false,
            aliases,
            credits,
            references,
            Array.Empty<AffectedPackage>(),
            Array.Empty<CvssMetric>(),
            new[] { documentProvenance, mappingProvenance });
    }

    // GHSA view: keyed by the GHSA id, aliased to the CVE.
    var ghsa = CreateAdvisory(
        "ghsa",
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits("ghsa"),
        CreateReferences(
            "ghsa",
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ("https://example.com/ghsa/patch", "patch")),
        $"security/advisories/{advisoryKeyGhsa}");

    // OSV view: same GHSA key, OSV-sourced provenance/references.
    var osv = CreateAdvisory(
        OsvConnectorPlugin.SourceName,
        advisoryKeyGhsa,
        new[] { advisoryKeyGhsa, advisoryKeyNvd },
        CreateCredits(OsvConnectorPlugin.SourceName),
        CreateReferences(
            OsvConnectorPlugin.SourceName,
            ($"https://github.com/advisories/{advisoryKeyGhsa}", "advisory"),
            ($"https://osv.dev/vulnerability/{advisoryKeyGhsa}", "advisory")),
        $"https://osv.dev/vulnerability/{advisoryKeyGhsa}");

    // NVD view: keyed by the CVE, aliased back to the GHSA id.
    var nvd = CreateAdvisory(
        NvdConnectorPlugin.SourceName,
        advisoryKeyNvd,
        new[] { advisoryKeyNvd, advisoryKeyGhsa },
        CreateCredits(NvdConnectorPlugin.SourceName),
        CreateReferences(
            NvdConnectorPlugin.SourceName,
            ($"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}", "advisory"),
            ("https://example.com/nvd/reference", "report")),
        $"https://services.nvd.nist.gov/vuln/detail/{advisoryKeyNvd}");

    var ghsaSnapshot = SnapshotSerializer.ToSnapshot(ghsa);
    var osvSnapshot = SnapshotSerializer.ToSnapshot(osv);
    var nvdSnapshot = SnapshotSerializer.ToSnapshot(nvd);

    // Write all three snapshots into both fixture directories.
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(ghsaFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.ghsa.json"), ghsaSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.osv.json"), osvSnapshot);
    File.WriteAllText(Path.Combine(nvdFixturesPath, "credit-parity.nvd.json"), nvdSnapshot);
    Console.WriteLine($"[FixtureUpdater] Updated credit parity fixtures under {ghsaFixturesPath} and {nvdFixturesPath}");
}
// Delegate CLI handling to the extracted FixtureUpdaterApp entry point.
// NOTE(review): in this diff view this line appears to belong to a separate,
// refactored Program.cs fragment — confirm against the full file layout.
return await FixtureUpdaterApp.RunAsync(args);

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

// Expose this assembly's internal types to the smoke-test project so tests can
// exercise internal helpers without widening the public API surface.
[assembly: InternalsVisibleTo("LanguageAnalyzerSmoke.Tests")]

View File

@@ -10,6 +10,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="System.CommandLine" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" />

View File

@@ -0,0 +1,113 @@
using System.CommandLine;
namespace StellaOps.Tools.LanguageAnalyzerSmoke;
/// <summary>
/// Command-line front-end for the language analyzer smoke harness. Parses the
/// CLI options, resolves <see cref="SmokeOptions"/>, and runs
/// <see cref="LanguageAnalyzerSmokeRunner"/>.
/// </summary>
public static class LanguageAnalyzerSmokeApp
{
    // Default deterministic clock value used when --use-system-time is not set.
    private static readonly DateTimeOffset DefaultFixedTime = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    /// <summary>
    /// Entry point. Returns 0 on success, 1 on smoke failure/cancellation,
    /// and 2 when the repo root cannot be resolved.
    /// </summary>
    public static async Task<int> RunAsync(string[] args)
    {
        var repoRootOption = new Option<DirectoryInfo?>("--repo-root")
        {
            Description = "Repository root (defaults to nearest folder containing src/StellaOps.sln)."
        };
        var analyzerOption = new Option<string>("--analyzer")
        {
            Description = "Analyzer to exercise (python, rust).",
            DefaultValueFactory = _ => "python"
        };
        var pluginDirectoryOption = new Option<string?>("--plugin-directory")
        {
            Description = "Analyzer plug-in directory under plugins/scanner/analyzers/lang."
        };
        var fixturePathOption = new Option<string?>("--fixture-path")
        {
            Description = "Relative path to fixtures root."
        };
        var allowGoldenDriftOption = new Option<bool>("--allow-golden-drift")
        {
            Description = "Allow golden snapshot drift without failing the run."
        };
        var fixedTimeOption = new Option<DateTimeOffset>("--fixed-time")
        {
            Description = "Fixed UTC time used by analyzers for deterministic output.",
            DefaultValueFactory = _ => DefaultFixedTime
        };
        var useSystemTimeOption = new Option<bool>("--use-system-time")
        {
            Description = "Use system clock instead of fixed time."
        };
        var timeoutSecondsOption = new Option<int>("--timeout-seconds")
        {
            Description = "Timeout per scenario in seconds (0 disables timeout).",
            DefaultValueFactory = _ => 120
        };

        var command = new RootCommand("Language analyzer smoke harness");
        command.Add(repoRootOption);
        command.Add(analyzerOption);
        command.Add(pluginDirectoryOption);
        command.Add(fixturePathOption);
        command.Add(allowGoldenDriftOption);
        command.Add(fixedTimeOption);
        command.Add(useSystemTimeOption);
        command.Add(timeoutSecondsOption);

        command.SetAction(async (parseResult, cancellationToken) =>
        {
            var repoRoot = parseResult.GetValue(repoRootOption);
            var analyzer = parseResult.GetValue(analyzerOption) ?? "python";
            var pluginDirectory = parseResult.GetValue(pluginDirectoryOption);
            var fixturePath = parseResult.GetValue(fixturePathOption);
            var allowGoldenDrift = parseResult.GetValue(allowGoldenDriftOption);
            var fixedTime = parseResult.GetValue(fixedTimeOption);
            var useSystemTime = parseResult.GetValue(useSystemTimeOption);
            var timeoutSeconds = parseResult.GetValue(timeoutSecondsOption);

            // Walk up from CWD to find src/StellaOps.sln unless a root was given.
            var resolvedRepoRoot = RepoRootLocator.TryResolve(repoRoot?.FullName);
            if (resolvedRepoRoot is null)
            {
                Console.Error.WriteLine("[FAIL] Unable to resolve repo root. Provide --repo-root explicitly.");
                return 2;
            }

            // Fill in profile defaults for anything not passed explicitly.
            var options = SmokeOptions.Resolve(
                repoRoot: resolvedRepoRoot,
                analyzerId: analyzer,
                pluginDirectoryName: pluginDirectory,
                fixtureRelativePath: fixturePath,
                allowGoldenDrift: allowGoldenDrift,
                fixedTime: fixedTime,
                useSystemTime: useSystemTime,
                timeoutSeconds: timeoutSeconds);

            var runner = new LanguageAnalyzerSmokeRunner(Console.WriteLine, Console.Error.WriteLine);
            try
            {
                var profile = await runner.RunAsync(options, cancellationToken).ConfigureAwait(false);
                Console.WriteLine($"[OK] {profile.DisplayName} analyzer smoke checks passed");
                return 0;
            }
            catch (OperationCanceledException ex)
            {
                // Covers both external cancellation and per-scenario timeouts.
                Console.Error.WriteLine($"[FAIL] Smoke run canceled: {ex.Message}");
                return 1;
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine($"[FAIL] {ex.Message}");
                return 1;
            }
        });

        return await command.Parse(args).InvokeAsync().ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,450 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Core.Security;
namespace StellaOps.Tools.LanguageAnalyzerSmoke;
/// <summary>A named fixture scenario and the relative file paths used as analyzer usage hints.</summary>
public sealed record SmokeScenario(string Name, string[] UsageHintRelatives)
{
    /// <summary>Resolves each relative usage hint against <paramref name="scenarioRoot"/> to an absolute path.</summary>
    public IReadOnlyList<string> ResolveUsageHints(string scenarioRoot)
    {
        var resolved = new string[UsageHintRelatives.Length];
        for (var i = 0; i < UsageHintRelatives.Length; i++)
        {
            resolved[i] = Path.GetFullPath(Path.Combine(scenarioRoot, UsageHintRelatives[i]));
        }

        return resolved;
    }
}
/// <summary>
/// Static description of one analyzer under smoke test: where its plug-in and
/// fixtures live, what its manifest must declare, and which scenarios to run.
/// </summary>
/// <param name="DisplayName">Human-readable analyzer name used in log output.</param>
/// <param name="AnalyzerId">Canonical analyzer id (e.g. "python", "rust").</param>
/// <param name="PluginDirectory">Directory name under plugins/scanner/analyzers/lang.</param>
/// <param name="FixtureRelativePath">Fixture root, relative to the repo root.</param>
/// <param name="ExpectedPluginId">Id the plug-in manifest must declare.</param>
/// <param name="ExpectedEntryPointType">Fully-qualified entry-point type the manifest must declare.</param>
/// <param name="RequiredCapabilities">Capabilities the manifest must list.</param>
/// <param name="Scenarios">Fixture scenarios exercised during the smoke run.</param>
public sealed record AnalyzerProfile(
    string DisplayName,
    string AnalyzerId,
    string PluginDirectory,
    string FixtureRelativePath,
    string ExpectedPluginId,
    string ExpectedEntryPointType,
    IReadOnlyList<string> RequiredCapabilities,
    SmokeScenario[] Scenarios);
/// <summary>Registry of the analyzer profiles the smoke harness knows how to exercise.</summary>
public static class AnalyzerProfileCatalog
{
    private static readonly SmokeScenario[] PythonScenarios =
    [
        new("simple-venv", new[] { Path.Combine("bin", "simple-tool") }),
        new("pip-cache", new[] { Path.Combine("lib", "python3.11", "site-packages", "cache_pkg-1.2.3.data", "scripts", "cache-tool") }),
        new("layered-editable", new[] { Path.Combine("layer1", "usr", "bin", "layered-cli") }),
    ];

    private static readonly SmokeScenario[] RustScenarios =
    [
        new("simple", new[] { Path.Combine("usr", "local", "bin", "my_app") }),
        new("heuristics", new[] { Path.Combine("usr", "local", "bin", "heuristic_app") }),
        new("fallback", new[] { Path.Combine("usr", "local", "bin", "opaque_bin") }),
    ];

    /// <summary>Known profiles, keyed case-insensitively by analyzer id.</summary>
    public static readonly IReadOnlyDictionary<string, AnalyzerProfile> Profiles =
        new Dictionary<string, AnalyzerProfile>(StringComparer.OrdinalIgnoreCase)
        {
            ["python"] = new AnalyzerProfile(
                DisplayName: "Python",
                AnalyzerId: "python",
                PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Python",
                FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python"),
                ExpectedPluginId: "stellaops.analyzer.lang.python",
                ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin",
                RequiredCapabilities: new[] { "python" },
                Scenarios: PythonScenarios),
            ["rust"] = new AnalyzerProfile(
                DisplayName: "Rust",
                AnalyzerId: "rust",
                PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Rust",
                FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust"),
                ExpectedPluginId: "stellaops.analyzer.lang.rust",
                ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin",
                RequiredCapabilities: new[] { "rust", "cargo" },
                Scenarios: RustScenarios),
        };

    /// <summary>Looks up a profile by id; throws <see cref="ArgumentException"/> for unknown analyzers.</summary>
    public static AnalyzerProfile GetProfile(string analyzerId)
        => Profiles.TryGetValue(analyzerId, out var profile)
            ? profile
            : throw new ArgumentException($"Unsupported analyzer '{analyzerId}'.", nameof(analyzerId));
}
/// <summary>Fully-resolved smoke-run settings produced from CLI input plus profile defaults.</summary>
public sealed record SmokeOptions(
    string RepoRoot,
    string AnalyzerId,
    string PluginDirectoryName,
    string FixtureRelativePath,
    bool AllowGoldenDrift,
    DateTimeOffset FixedTime,
    bool UseSystemTime,
    TimeSpan? Timeout)
{
    /// <summary>
    /// Normalizes raw CLI values into an option set: blank plug-in/fixture paths
    /// fall back to the analyzer profile's defaults, the repo root is made
    /// absolute, and a non-positive timeout disables the per-scenario timeout.
    /// </summary>
    public static SmokeOptions Resolve(
        string repoRoot,
        string analyzerId,
        string? pluginDirectoryName,
        string? fixtureRelativePath,
        bool allowGoldenDrift,
        DateTimeOffset fixedTime,
        bool useSystemTime,
        int timeoutSeconds)
    {
        var profile = AnalyzerProfileCatalog.GetProfile(analyzerId);

        var pluginDirectory = !string.IsNullOrWhiteSpace(pluginDirectoryName)
            ? pluginDirectoryName
            : profile.PluginDirectory;
        var fixturePath = !string.IsNullOrWhiteSpace(fixtureRelativePath)
            ? fixtureRelativePath
            : profile.FixtureRelativePath;
        var timeout = timeoutSeconds > 0 ? TimeSpan.FromSeconds(timeoutSeconds) : (TimeSpan?)null;

        return new SmokeOptions(
            Path.GetFullPath(repoRoot),
            profile.AnalyzerId,
            pluginDirectory,
            fixturePath,
            allowGoldenDrift,
            fixedTime,
            useSystemTime,
            timeout);
    }
}
/// <summary>
/// JSON contract for a plug-in's manifest.json. Property names are pinned via
/// <see cref="JsonPropertyNameAttribute"/>; do not rename without updating the
/// on-disk manifests.
/// </summary>
public sealed record PluginManifest
{
    // Manifest schema version; the runner accepts exactly "1.0".
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = string.Empty;

    // Canonical plug-in id (e.g. "stellaops.analyzer.lang.python").
    [JsonPropertyName("id")]
    public string Id { get; init; } = string.Empty;

    [JsonPropertyName("displayName")]
    public string DisplayName { get; init; } = string.Empty;

    [JsonPropertyName("version")]
    public string Version { get; init; } = string.Empty;

    // Analyzer plug-ins must set this; the runner rejects manifests where it is false.
    [JsonPropertyName("requiresRestart")]
    public bool RequiresRestart { get; init; }

    [JsonPropertyName("entryPoint")]
    public PluginEntryPoint EntryPoint { get; init; } = new();

    // Declared capabilities (e.g. "python", "rust", "cargo").
    [JsonPropertyName("capabilities")]
    public IReadOnlyList<string> Capabilities { get; init; } = Array.Empty<string>();

    [JsonPropertyName("metadata")]
    public IReadOnlyDictionary<string, string> Metadata { get; init; } = ImmutableDictionary<string, string>.Empty;
}
/// <summary>Entry-point section of a plug-in manifest: host kind, assembly file, and type to instantiate.</summary>
public sealed record PluginEntryPoint
{
    // Host kind; the runner accepts only "dotnet".
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    // Assembly file name, relative to the plug-in directory.
    [JsonPropertyName("assembly")]
    public string Assembly { get; init; } = string.Empty;

    // Fully-qualified type name of the analyzer plug-in.
    [JsonPropertyName("typeName")]
    public string TypeName { get; init; } = string.Empty;
}
/// <summary>
/// Executes the smoke flow for one analyzer plug-in: validates the manifest,
/// loads the plug-in, then runs each fixture scenario twice (cold/warm) and
/// checks determinism, golden snapshots, and time budgets.
/// </summary>
public sealed class LanguageAnalyzerSmokeRunner
{
    private readonly Action<string> _info;
    // NOTE(review): _error is accepted and stored but not used by any visible member.
    private readonly Action<string> _error;

    /// <summary>Creates a runner; null sinks default to no-op writers.</summary>
    public LanguageAnalyzerSmokeRunner(Action<string>? info = null, Action<string>? error = null)
    {
        _info = info ?? (_ => { });
        _error = error ?? (_ => { });
    }

    /// <summary>
    /// Runs the full smoke sequence for <paramref name="options"/> and returns
    /// the exercised profile. Throws on any validation or scenario failure.
    /// </summary>
    public async Task<AnalyzerProfile> RunAsync(SmokeOptions options, CancellationToken cancellationToken)
    {
        var profile = AnalyzerProfileCatalog.GetProfile(options.AnalyzerId);
        ValidateOptions(options);

        // Locate and validate the plug-in manifest.
        var pluginRoot = Path.Combine(options.RepoRoot, "plugins", "scanner", "analyzers", "lang", options.PluginDirectoryName);
        var manifestPath = Path.Combine(pluginRoot, "manifest.json");
        if (!File.Exists(manifestPath))
        {
            throw new FileNotFoundException($"Plug-in manifest not found at '{manifestPath}'.", manifestPath);
        }

        using var manifestStream = File.OpenRead(manifestPath);
        var manifest = JsonSerializer.Deserialize<PluginManifest>(manifestStream, new JsonSerializerOptions
        {
            PropertyNameCaseInsensitive = true,
            ReadCommentHandling = JsonCommentHandling.Skip
        }) ?? throw new InvalidOperationException($"Unable to parse manifest '{manifestPath}'.");
        ValidateManifest(manifest, profile, options.PluginDirectoryName);

        // Verify the declared assembly exists and log its digest for traceability.
        var pluginAssemblyPath = Path.Combine(pluginRoot, manifest.EntryPoint.Assembly);
        if (!File.Exists(pluginAssemblyPath))
        {
            throw new FileNotFoundException($"Plug-in assembly '{manifest.EntryPoint.Assembly}' not found under '{pluginRoot}'.", pluginAssemblyPath);
        }
        var sha256 = ComputeSha256(pluginAssemblyPath);
        _info($"-> Plug-in assembly SHA-256: {sha256}");

        // Load plug-ins through the real (restart-only, sealed) catalog.
        using var serviceProvider = BuildServiceProvider();
        var catalog = new LanguageAnalyzerPluginCatalog(new RestartOnlyPluginGuard(), NullLogger<LanguageAnalyzerPluginCatalog>.Instance);
        catalog.LoadFromDirectory(pluginRoot, seal: true);
        if (catalog.Plugins.Count == 0)
        {
            throw new InvalidOperationException($"No analyzer plug-ins were loaded from '{pluginRoot}'.");
        }

        var analyzerSet = catalog.CreateAnalyzers(serviceProvider);
        if (analyzerSet.Count == 0)
        {
            throw new InvalidOperationException("Language analyzer plug-ins reported no analyzers.");
        }
        var analyzerIds = analyzerSet.Select(analyzer => analyzer.Id).ToArray();
        _info($"-> Loaded analyzers: {string.Join(", ", analyzerIds)}");
        if (!analyzerIds.Contains(profile.AnalyzerId, StringComparer.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"{profile.DisplayName} analyzer was not created by the plug-in.");
        }

        var fixtureRoot = Path.GetFullPath(Path.Combine(options.RepoRoot, options.FixtureRelativePath));
        if (!Directory.Exists(fixtureRoot))
        {
            throw new DirectoryNotFoundException($"Fixture directory '{fixtureRoot}' does not exist.");
        }

        // Fixed clock keeps analyzer output deterministic unless opted out.
        var timeProvider = options.UseSystemTime ? TimeProvider.System : new FixedTimeProvider(options.FixedTime);
        foreach (var scenario in profile.Scenarios)
        {
            await RunScenarioAsync(scenario, fixtureRoot, catalog, serviceProvider, options, timeProvider, cancellationToken).ConfigureAwait(false);
        }

        return profile;
    }

    /// <summary>
    /// Validates a manifest against the profile's expectations (schema version,
    /// restart flag, entry-point kind/type, capabilities, plug-in id).
    /// </summary>
    internal static void ValidateManifest(PluginManifest manifest, AnalyzerProfile profile, string pluginDirectoryName)
    {
        if (!string.Equals(manifest.SchemaVersion, "1.0", StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected manifest schema version '{manifest.SchemaVersion}'.");
        }
        if (!manifest.RequiresRestart)
        {
            throw new InvalidOperationException("Language analyzer plug-in must be marked as restart-only.");
        }
        if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Unsupported entry point type '{manifest.EntryPoint.Type}'.");
        }
        foreach (var capability in profile.RequiredCapabilities)
        {
            if (!manifest.Capabilities.Contains(capability, StringComparer.OrdinalIgnoreCase))
            {
                throw new InvalidOperationException($"Manifest capabilities do not include required capability '{capability}'.");
            }
        }
        if (!string.Equals(manifest.EntryPoint.TypeName, profile.ExpectedEntryPointType, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected entry point type name '{manifest.EntryPoint.TypeName}'.");
        }
        if (!string.Equals(manifest.Id, profile.ExpectedPluginId, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Manifest id '{manifest.Id}' does not match expected plug-in id for directory '{pluginDirectoryName}'.");
        }
    }

    /// <summary>
    /// Compares normalized analyzer output against the repo's golden snapshot.
    /// A null golden means "no snapshot on disk" and is skipped; drift either
    /// warns (when allowed) or fails the run.
    /// </summary>
    internal static void CompareGoldenSnapshot(string scenarioName, string actualJson, string? goldenNormalized, bool allowGoldenDrift, Action<string> info)
    {
        if (goldenNormalized is null)
        {
            return;
        }
        if (!string.Equals(actualJson, goldenNormalized, StringComparison.Ordinal))
        {
            if (allowGoldenDrift)
            {
                info($"[WARN] Scenario '{scenarioName}' output deviates from repository golden snapshot.");
                return;
            }
            throw new InvalidOperationException($"Scenario '{scenarioName}' output deviates from repository golden snapshot.");
        }
    }

    // Runs one scenario twice (fresh analyzer sets each time) and requires:
    // non-empty components, golden-snapshot match, cold==warm output, and both
    // runs within their time budgets.
    private async Task RunScenarioAsync(
        SmokeScenario scenario,
        string fixtureRoot,
        ILanguageAnalyzerPluginCatalog catalog,
        IServiceProvider services,
        SmokeOptions options,
        TimeProvider timeProvider,
        CancellationToken cancellationToken)
    {
        var scenarioRoot = Path.Combine(fixtureRoot, scenario.Name);
        if (!Directory.Exists(scenarioRoot))
        {
            throw new DirectoryNotFoundException($"Scenario '{scenario.Name}' directory missing at '{scenarioRoot}'.");
        }

        // Optional golden snapshot (expected.json) for exact-output comparison.
        var goldenPath = Path.Combine(scenarioRoot, "expected.json");
        string? goldenNormalized = null;
        if (File.Exists(goldenPath))
        {
            goldenNormalized = NormalizeJson(await File.ReadAllTextAsync(goldenPath, cancellationToken).ConfigureAwait(false));
        }

        var usageHints = new LanguageUsageHints(scenario.ResolveUsageHints(scenarioRoot));
        var context = new LanguageAnalyzerContext(scenarioRoot, timeProvider, usageHints, services);

        // Cold run: freshly created analyzers.
        var coldEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var coldStopwatch = Stopwatch.StartNew();
        var coldResult = await RunWithTimeoutAsync(token => coldEngine.AnalyzeAsync(context, token), options.Timeout, cancellationToken).ConfigureAwait(false);
        coldStopwatch.Stop();
        if (coldResult.Components.Count == 0)
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced no components during cold run.");
        }
        var coldJson = NormalizeJson(coldResult.ToJson(indent: true));
        CompareGoldenSnapshot(scenario.Name, coldJson, goldenNormalized, options.AllowGoldenDrift, _info);

        // Warm run: second pass must reproduce the cold output byte-for-byte.
        var warmEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var warmStopwatch = Stopwatch.StartNew();
        var warmResult = await RunWithTimeoutAsync(token => warmEngine.AnalyzeAsync(context, token), options.Timeout, cancellationToken).ConfigureAwait(false);
        warmStopwatch.Stop();
        var warmJson = NormalizeJson(warmResult.ToJson(indent: true));
        if (!string.Equals(coldJson, warmJson, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced different outputs between cold and warm runs.");
        }

        EnsureDurationWithinBudget(scenario.Name, coldStopwatch.Elapsed, warmStopwatch.Elapsed);
        _info($"[OK] Scenario '{scenario.Name}' - components {coldResult.Components.Count}, cold {coldStopwatch.Elapsed.TotalMilliseconds:F1} ms, warm {warmStopwatch.Elapsed.TotalMilliseconds:F1} ms");
    }

    // Runs the action under a linked cancellation source so a scenario timeout
    // surfaces as OperationCanceledException; null timeout means unbounded.
    private static async Task<T> RunWithTimeoutAsync<T>(Func<CancellationToken, ValueTask<T>> action, TimeSpan? timeout, CancellationToken cancellationToken)
    {
        if (timeout is null)
        {
            return await action(cancellationToken).ConfigureAwait(false);
        }
        using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        cts.CancelAfter(timeout.Value);
        return await action(cts.Token).ConfigureAwait(false);
    }

    // Minimal DI container: only logging is registered for analyzer creation.
    private static ServiceProvider BuildServiceProvider()
    {
        var services = new ServiceCollection();
        services.AddLogging();
        return services.BuildServiceProvider();
    }

    // Hard-coded per-run budgets: 30s cold, 5s warm.
    private static void EnsureDurationWithinBudget(string scenarioName, TimeSpan coldDuration, TimeSpan warmDuration)
    {
        var coldBudget = TimeSpan.FromSeconds(30);
        var warmBudget = TimeSpan.FromSeconds(5);
        if (coldDuration > coldBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' cold run exceeded budget ({coldDuration.TotalSeconds:F2}s > {coldBudget.TotalSeconds:F2}s)." );
        }
        if (warmDuration > warmBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' warm run exceeded budget ({warmDuration.TotalSeconds:F2}s > {warmBudget.TotalSeconds:F2}s)." );
        }
    }

    // Normalizes line endings and trailing whitespace before JSON comparison.
    private static string NormalizeJson(string json)
        => json.Replace("\r\n", "\n", StringComparison.Ordinal).TrimEnd();

    private static void ValidateOptions(SmokeOptions options)
    {
        if (!Directory.Exists(options.RepoRoot))
        {
            throw new DirectoryNotFoundException($"Repository root '{options.RepoRoot}' does not exist.");
        }
    }

    // Returns the lowercase hex SHA-256 digest of a file's contents.
    private static string ComputeSha256(string path)
    {
        using var hash = SHA256.Create();
        using var stream = File.OpenRead(path);
        var digest = hash.ComputeHash(stream);
        var builder = new StringBuilder(digest.Length * 2);
        foreach (var b in digest)
        {
            builder.Append(b.ToString("x2"));
        }
        return builder.ToString();
    }
}
/// <summary>
/// <see cref="TimeProvider"/> that always reports the same instant, giving
/// analyzers fully deterministic timestamps during smoke runs.
/// </summary>
internal sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _fixedTime;
    private readonly long _timestamp;

    public FixedTimeProvider(DateTimeOffset fixedTime)
    {
        _fixedTime = fixedTime;
        // Timestamps are expressed in DateTime ticks; see TimestampFrequency below.
        _timestamp = fixedTime.UtcTicks;
    }

    /// <summary>Always returns the fixed instant.</summary>
    public override DateTimeOffset GetUtcNow() => _fixedTime;

    /// <summary>Always returns the fixed instant's UTC tick count.</summary>
    public override long GetTimestamp() => _timestamp;

    // GetTimestamp returns DateTime ticks, so the advertised frequency must be
    // ticks-per-second. Without this override the inherited default (the
    // Stopwatch frequency) would make TimeProvider.GetElapsedTime interpret
    // these values in the wrong units.
    public override long TimestampFrequency => TimeSpan.TicksPerSecond;
}
/// <summary>Locates the repository root for the smoke harness.</summary>
internal static class RepoRootLocator
{
    /// <summary>
    /// Returns the absolute form of <paramref name="repoRoot"/> when supplied;
    /// otherwise walks up from the current directory looking for a folder that
    /// contains src/StellaOps.sln. Returns null when nothing matches.
    /// </summary>
    public static string? TryResolve(string? repoRoot)
    {
        if (!string.IsNullOrWhiteSpace(repoRoot))
        {
            return Path.GetFullPath(repoRoot);
        }

        for (var candidate = new DirectoryInfo(Directory.GetCurrentDirectory()); candidate is not null; candidate = candidate.Parent)
        {
            if (File.Exists(Path.Combine(candidate.FullName, "src", "StellaOps.sln")))
            {
                return candidate.FullName;
            }
        }

        return null;
    }
}

View File

@@ -1,434 +1,3 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Plugin;
using StellaOps.Scanner.Core.Security;
using StellaOps.Tools.LanguageAnalyzerSmoke;
// NOTE(review): per the diff header this internal type is removed in this
// commit, superseded by the public StellaOps.Tools.LanguageAnalyzerSmoke version.
/// <summary>A named fixture scenario and the relative paths used as analyzer usage hints.</summary>
internal sealed record SmokeScenario(string Name, string[] UsageHintRelatives)
{
    /// <summary>Resolves each relative hint against <paramref name="scenarioRoot"/> to an absolute path.</summary>
    public IReadOnlyList<string> ResolveUsageHints(string scenarioRoot)
        => UsageHintRelatives.Select(relative => Path.GetFullPath(Path.Combine(scenarioRoot, relative))).ToArray();
}
// NOTE(review): removed in this commit, superseded by the public library version.
/// <summary>Static description of one analyzer under smoke test (plug-in location, manifest expectations, scenarios).</summary>
internal sealed record AnalyzerProfile(
    string DisplayName,
    string AnalyzerId,
    string PluginDirectory,
    string FixtureRelativePath,
    string ExpectedPluginId,
    string ExpectedEntryPointType,
    IReadOnlyList<string> RequiredCapabilities,
    SmokeScenario[] Scenarios);
// NOTE(review): removed in this commit, superseded by the public library version
// (which also adds a GetProfile helper).
/// <summary>Registry of analyzer profiles known to the smoke harness, keyed case-insensitively.</summary>
internal static class AnalyzerProfileCatalog
{
    private static readonly SmokeScenario[] PythonScenarios =
    {
        new("simple-venv", new[] { Path.Combine("bin", "simple-tool") }),
        new("pip-cache", new[] { Path.Combine("lib", "python3.11", "site-packages", "cache_pkg-1.2.3.data", "scripts", "cache-tool") }),
        new("layered-editable", new[] { Path.Combine("layer1", "usr", "bin", "layered-cli") }),
    };

    private static readonly SmokeScenario[] RustScenarios =
    {
        new("simple", new[] { Path.Combine("usr", "local", "bin", "my_app") }),
        new("heuristics", new[] { Path.Combine("usr", "local", "bin", "heuristic_app") }),
        new("fallback", new[] { Path.Combine("usr", "local", "bin", "opaque_bin") }),
    };

    public static readonly IReadOnlyDictionary<string, AnalyzerProfile> Profiles =
        new Dictionary<string, AnalyzerProfile>(StringComparer.OrdinalIgnoreCase)
        {
            ["python"] = new AnalyzerProfile(
                DisplayName: "Python",
                AnalyzerId: "python",
                PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Python",
                FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python"),
                ExpectedPluginId: "stellaops.analyzer.lang.python",
                ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin",
                RequiredCapabilities: new[] { "python" },
                Scenarios: PythonScenarios),
            ["rust"] = new AnalyzerProfile(
                DisplayName: "Rust",
                AnalyzerId: "rust",
                PluginDirectory: "StellaOps.Scanner.Analyzers.Lang.Rust",
                FixtureRelativePath: Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Tests", "Fixtures", "lang", "rust"),
                ExpectedPluginId: "stellaops.analyzer.lang.rust",
                ExpectedEntryPointType: "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin",
                RequiredCapabilities: new[] { "rust", "cargo" },
                Scenarios: RustScenarios),
        };
}
// NOTE(review): removed in this commit — the hand-rolled argument parser is
// replaced by System.CommandLine wiring in LanguageAnalyzerSmokeApp.
/// <summary>Mutable CLI option bag with a hand-rolled argument parser.</summary>
internal sealed class SmokeOptions
{
    public string RepoRoot { get; set; } = Directory.GetCurrentDirectory();
    public string AnalyzerId { get; set; } = "python";
    public string PluginDirectoryName { get; set; } = "StellaOps.Scanner.Analyzers.Lang.Python";
    public string FixtureRelativePath { get; set; } = Path.Combine("src", "Scanner", "__Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "Fixtures", "lang", "python");

    // Track which values were passed explicitly so profile defaults only apply
    // to the ones that were not.
    public bool PluginDirectoryExplicit { get; private set; }
    public bool FixturePathExplicit { get; private set; }

    /// <summary>
    /// Parses CLI arguments; throws <see cref="ArgumentException"/> for unknown
    /// switches or missing values. --help prints usage and exits the process.
    /// </summary>
    public static SmokeOptions Parse(string[] args)
    {
        var options = new SmokeOptions();
        for (var index = 0; index < args.Length; index++)
        {
            var current = args[index];
            switch (current)
            {
                case "--repo-root":
                case "-r":
                    options.RepoRoot = RequireValue(args, ref index, current);
                    break;
                case "--plugin-directory":
                case "-p":
                    options.PluginDirectoryName = RequireValue(args, ref index, current);
                    options.PluginDirectoryExplicit = true;
                    break;
                case "--fixture-path":
                case "-f":
                    options.FixtureRelativePath = RequireValue(args, ref index, current);
                    options.FixturePathExplicit = true;
                    break;
                case "--analyzer":
                case "-a":
                    options.AnalyzerId = RequireValue(args, ref index, current);
                    break;
                case "--help":
                case "-h":
                    PrintUsage();
                    Environment.Exit(0);
                    break;
                default:
                    throw new ArgumentException($"Unknown argument '{current}'. Use --help for usage.");
            }
        }

        options.RepoRoot = Path.GetFullPath(options.RepoRoot);
        if (!AnalyzerProfileCatalog.Profiles.TryGetValue(options.AnalyzerId, out var profile))
        {
            throw new ArgumentException($"Unsupported analyzer '{options.AnalyzerId}'.");
        }
        // Apply profile defaults for anything not explicitly supplied.
        if (!options.PluginDirectoryExplicit)
        {
            options.PluginDirectoryName = profile.PluginDirectory;
        }
        if (!options.FixturePathExplicit)
        {
            options.FixtureRelativePath = profile.FixtureRelativePath;
        }
        return options;
    }

    // Consumes and returns the value following a switch; rejects missing/blank values.
    private static string RequireValue(string[] args, ref int index, string switchName)
    {
        if (index + 1 >= args.Length)
        {
            throw new ArgumentException($"Missing value for '{switchName}'.");
        }
        index++;
        var value = args[index];
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException($"Value for '{switchName}' cannot be empty.");
        }
        return value;
    }

    // Writes usage help to stdout.
    private static void PrintUsage()
    {
        Console.WriteLine("Language Analyzer Smoke Harness");
        Console.WriteLine("Usage: dotnet run --project src/Tools/LanguageAnalyzerSmoke -- [options]");
        Console.WriteLine();
        Console.WriteLine("Options:");
        Console.WriteLine("  -a, --analyzer <name>          Analyzer to exercise (python, rust). Defaults to python.");
        Console.WriteLine("  -r, --repo-root <path>         Repository root (defaults to current working directory)");
        Console.WriteLine("  -p, --plugin-directory <name>  Analyzer plug-in directory under plugins/scanner/analyzers/lang (defaults to StellaOps.Scanner.Analyzers.Lang.Python)");
        Console.WriteLine("  -f, --fixture-path <path>      Relative path to fixtures root (defaults to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)");
        Console.WriteLine("  -h, --help                     Show usage information");
    }
}
/// <summary>
/// Deserialized shape of a plug-in manifest.json. Property defaults keep the
/// record non-null even when fields are absent from the JSON.
/// </summary>
internal sealed record PluginManifest
{
    // Manifest schema version; the harness accepts only "1.0" (see ValidateManifest).
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = string.Empty;
    [JsonPropertyName("id")]
    public string Id { get; init; } = string.Empty;
    [JsonPropertyName("displayName")]
    public string DisplayName { get; init; } = string.Empty;
    [JsonPropertyName("version")]
    public string Version { get; init; } = string.Empty;
    // Language analyzer plug-ins must declare restart-only semantics (validated by the harness).
    [JsonPropertyName("requiresRestart")]
    public bool RequiresRestart { get; init; }
    [JsonPropertyName("entryPoint")]
    public PluginEntryPoint EntryPoint { get; init; } = new();
    [JsonPropertyName("capabilities")]
    public IReadOnlyList<string> Capabilities { get; init; } = Array.Empty<string>();
    [JsonPropertyName("metadata")]
    public IReadOnlyDictionary<string, string> Metadata { get; init; } = ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Entry point descriptor from the manifest: host type (harness requires
/// "dotnet"), assembly file name, and fully qualified plug-in type name.
/// </summary>
internal sealed record PluginEntryPoint
{
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;
    [JsonPropertyName("assembly")]
    public string Assembly { get; init; } = string.Empty;
    [JsonPropertyName("typeName")]
    public string TypeName { get; init; } = string.Empty;
}
/// <summary>
/// Smoke harness driver: loads a language analyzer plug-in from the repository
/// layout, validates its manifest and assembly, then replays each fixture
/// scenario twice (cold + warm) checking output determinism and time budgets.
/// </summary>
file static class Program
{
    // NOTE: the previous PythonScenarios field was removed — it was never
    // referenced; scenario lists come from AnalyzerProfileCatalog profiles.

    /// <summary>Entry point: 0 on success, 1 with a diagnostic on stderr otherwise.</summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = SmokeOptions.Parse(args);
            var profile = await RunAsync(options).ConfigureAwait(false);
            Console.WriteLine($"✅ {profile.DisplayName} analyzer smoke checks passed");
            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"❌ {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Runs the full smoke sequence for the analyzer selected in
    /// <paramref name="options"/> and returns its profile on success.
    /// Throws on any validation failure (missing files, bad manifest,
    /// non-deterministic output, budget overrun).
    /// </summary>
    private static async Task<AnalyzerProfile> RunAsync(SmokeOptions options)
    {
        if (!AnalyzerProfileCatalog.Profiles.TryGetValue(options.AnalyzerId, out var profile))
        {
            throw new ArgumentException($"Analyzer '{options.AnalyzerId}' is not supported.");
        }
        ValidateOptions(options);
        var pluginRoot = Path.Combine(options.RepoRoot, "plugins", "scanner", "analyzers", "lang", options.PluginDirectoryName);
        var manifestPath = Path.Combine(pluginRoot, "manifest.json");
        if (!File.Exists(manifestPath))
        {
            throw new FileNotFoundException($"Plug-in manifest not found at '{manifestPath}'.", manifestPath);
        }
        using var manifestStream = File.OpenRead(manifestPath);
        var manifest = JsonSerializer.Deserialize<PluginManifest>(manifestStream, new JsonSerializerOptions
        {
            PropertyNameCaseInsensitive = true,
            ReadCommentHandling = JsonCommentHandling.Skip
        }) ?? throw new InvalidOperationException($"Unable to parse manifest '{manifestPath}'.");
        ValidateManifest(manifest, profile, options.PluginDirectoryName);
        var pluginAssemblyPath = Path.Combine(pluginRoot, manifest.EntryPoint.Assembly);
        if (!File.Exists(pluginAssemblyPath))
        {
            throw new FileNotFoundException($"Plug-in assembly '{manifest.EntryPoint.Assembly}' not found under '{pluginRoot}'.", pluginAssemblyPath);
        }
        // Log the assembly digest so CI output pins exactly which binary was exercised.
        var sha256 = ComputeSha256(pluginAssemblyPath);
        Console.WriteLine($"→ Plug-in assembly SHA-256: {sha256}");
        using var serviceProvider = BuildServiceProvider();
        var catalog = new LanguageAnalyzerPluginCatalog(new RestartOnlyPluginGuard(), NullLogger<LanguageAnalyzerPluginCatalog>.Instance);
        catalog.LoadFromDirectory(pluginRoot, seal: true);
        if (catalog.Plugins.Count == 0)
        {
            throw new InvalidOperationException($"No analyzer plug-ins were loaded from '{pluginRoot}'.");
        }
        var analyzerSet = catalog.CreateAnalyzers(serviceProvider);
        if (analyzerSet.Count == 0)
        {
            throw new InvalidOperationException("Language analyzer plug-ins reported no analyzers.");
        }
        var analyzerIds = analyzerSet.Select(analyzer => analyzer.Id).ToArray();
        Console.WriteLine($"→ Loaded analyzers: {string.Join(", ", analyzerIds)}");
        if (!analyzerIds.Contains(profile.AnalyzerId, StringComparer.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"{profile.DisplayName} analyzer was not created by the plug-in.");
        }
        var fixtureRoot = Path.GetFullPath(Path.Combine(options.RepoRoot, options.FixtureRelativePath));
        if (!Directory.Exists(fixtureRoot))
        {
            throw new DirectoryNotFoundException($"Fixture directory '{fixtureRoot}' does not exist.");
        }
        // Scenarios are supplied by the profile so each analyzer exercises its own fixtures.
        foreach (var scenario in profile.Scenarios)
        {
            await RunScenarioAsync(scenario, fixtureRoot, catalog, serviceProvider).ConfigureAwait(false);
        }
        return profile;
    }

    // Minimal DI container: only logging is registered for the analyzers.
    private static ServiceProvider BuildServiceProvider()
    {
        var services = new ServiceCollection();
        services.AddLogging();
        return services.BuildServiceProvider();
    }

    /// <summary>
    /// Executes one fixture scenario twice with fresh analyzer instances,
    /// comparing cold vs. warm output byte-for-byte and (when present) against
    /// the repository golden snapshot. Warns on golden drift; throws on
    /// non-determinism or budget overruns.
    /// </summary>
    private static async Task RunScenarioAsync(SmokeScenario scenario, string fixtureRoot, ILanguageAnalyzerPluginCatalog catalog, IServiceProvider services)
    {
        var scenarioRoot = Path.Combine(fixtureRoot, scenario.Name);
        if (!Directory.Exists(scenarioRoot))
        {
            throw new DirectoryNotFoundException($"Scenario '{scenario.Name}' directory missing at '{scenarioRoot}'.");
        }
        var goldenPath = Path.Combine(scenarioRoot, "expected.json");
        string? goldenNormalized = null;
        if (File.Exists(goldenPath))
        {
            goldenNormalized = NormalizeJson(await File.ReadAllTextAsync(goldenPath).ConfigureAwait(false));
        }
        var usageHints = new LanguageUsageHints(scenario.ResolveUsageHints(scenarioRoot));
        var context = new LanguageAnalyzerContext(scenarioRoot, TimeProvider.System, usageHints, services);
        var coldEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var coldStopwatch = Stopwatch.StartNew();
        var coldResult = await coldEngine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false);
        coldStopwatch.Stop();
        if (coldResult.Components.Count == 0)
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced no components during cold run.");
        }
        var coldJson = NormalizeJson(coldResult.ToJson(indent: true));
        // Golden drift is a warning, not a failure: the snapshot may lag a legitimate change.
        if (goldenNormalized is string expected && !string.Equals(coldJson, expected, StringComparison.Ordinal))
        {
            Console.WriteLine($"⚠️ Scenario '{scenario.Name}' output deviates from repository golden snapshot.");
        }
        var warmEngine = new LanguageAnalyzerEngine(catalog.CreateAnalyzers(services));
        var warmStopwatch = Stopwatch.StartNew();
        var warmResult = await warmEngine.AnalyzeAsync(context, CancellationToken.None).ConfigureAwait(false);
        warmStopwatch.Stop();
        var warmJson = NormalizeJson(warmResult.ToJson(indent: true));
        if (!string.Equals(coldJson, warmJson, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Scenario '{scenario.Name}' produced different outputs between cold and warm runs.");
        }
        EnsureDurationWithinBudget(scenario.Name, coldStopwatch.Elapsed, warmStopwatch.Elapsed);
        Console.WriteLine($"✓ Scenario '{scenario.Name}' — components {coldResult.Components.Count}, cold {coldStopwatch.Elapsed.TotalMilliseconds:F1} ms, warm {warmStopwatch.Elapsed.TotalMilliseconds:F1} ms");
    }

    // Fixed budgets: 30 s cold (includes JIT/assembly load), 5 s warm.
    private static void EnsureDurationWithinBudget(string scenarioName, TimeSpan coldDuration, TimeSpan warmDuration)
    {
        var coldBudget = TimeSpan.FromSeconds(30);
        var warmBudget = TimeSpan.FromSeconds(5);
        if (coldDuration > coldBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' cold run exceeded budget ({coldDuration.TotalSeconds:F2}s > {coldBudget.TotalSeconds:F2}s).");
        }
        if (warmDuration > warmBudget)
        {
            throw new InvalidOperationException($"Scenario '{scenarioName}' warm run exceeded budget ({warmDuration.TotalSeconds:F2}s > {warmBudget.TotalSeconds:F2}s).");
        }
    }

    // Normalizes line endings and trailing whitespace so comparisons are
    // stable across operating systems.
    private static string NormalizeJson(string json)
        => json.Replace("\r\n", "\n", StringComparison.Ordinal).TrimEnd();

    private static void ValidateOptions(SmokeOptions options)
    {
        if (!Directory.Exists(options.RepoRoot))
        {
            throw new DirectoryNotFoundException($"Repository root '{options.RepoRoot}' does not exist.");
        }
    }

    // Cross-checks manifest contents against the analyzer profile expectations.
    private static void ValidateManifest(PluginManifest manifest, AnalyzerProfile profile, string pluginDirectoryName)
    {
        if (!string.Equals(manifest.SchemaVersion, "1.0", StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected manifest schema version '{manifest.SchemaVersion}'.");
        }
        if (!manifest.RequiresRestart)
        {
            throw new InvalidOperationException("Language analyzer plug-in must be marked as restart-only.");
        }
        if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Unsupported entry point type '{manifest.EntryPoint.Type}'.");
        }
        foreach (var capability in profile.RequiredCapabilities)
        {
            if (!manifest.Capabilities.Contains(capability, StringComparer.OrdinalIgnoreCase))
            {
                throw new InvalidOperationException($"Manifest capabilities do not include required capability '{capability}'.");
            }
        }
        if (!string.Equals(manifest.EntryPoint.TypeName, profile.ExpectedEntryPointType, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unexpected entry point type name '{manifest.EntryPoint.TypeName}'.");
        }
        if (!string.Equals(manifest.Id, profile.ExpectedPluginId, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Manifest id '{manifest.Id}' does not match expected plug-in id for directory '{pluginDirectoryName}'.");
        }
    }

    // Streams the file through SHA-256 and returns a lowercase hex digest.
    private static string ComputeSha256(string path)
    {
        using var hash = SHA256.Create();
        using var stream = File.OpenRead(path);
        var digest = hash.ComputeHash(stream);
        var builder = new StringBuilder(digest.Length * 2);
        foreach (var b in digest)
        {
            builder.Append(b.ToString("x2"));
        }
        return builder.ToString();
    }
}
// Delegate to the shared smoke app entry point and propagate its exit code.
return await LanguageAnalyzerSmokeApp.RunAsync(args);

View File

@@ -0,0 +1,3 @@
// Expose internal members to the companion test assembly.
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("NotifySmokeCheck.Tests")]

View File

@@ -0,0 +1,21 @@
namespace StellaOps.Tools.NotifySmokeCheck;
/// <summary>
/// Process-level wrapper for the Notify smoke validation tool: builds options
/// from the environment, runs the checker, and maps the outcome to an exit
/// code (0 = success, 1 = failure with the message on stderr).
/// </summary>
public static class NotifySmokeCheckApp
{
    public static async Task<int> RunAsync(string[] args)
    {
        try
        {
            var smokeOptions = NotifySmokeOptions.FromEnvironment(Environment.GetEnvironmentVariable);
            var smokeRunner = new NotifySmokeCheckRunner(smokeOptions, Console.WriteLine, Console.Error.WriteLine);
            await smokeRunner.RunAsync(CancellationToken.None).ConfigureAwait(false);
            Console.WriteLine("[OK] Notify smoke validation completed successfully.");
            return 0;
        }
        catch (Exception ex)
        {
            // Any failure (missing env var, Redis/API check) surfaces here.
            Console.Error.WriteLine($"[FAIL] {ex.Message}");
            return 1;
        }
    }
}

View File

@@ -0,0 +1,482 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text.Json;
using StackExchange.Redis;
namespace StellaOps.Tools.NotifySmokeCheck;
/// <summary>
/// Settings for querying the Notify deliveries API: base endpoint, bearer
/// token, tenant value plus the header it is sent in, HTTP timeout, and the
/// page-size limit for the deliveries query.
/// </summary>
public sealed record NotifyDeliveryOptions(
    Uri BaseUri,
    string Token,
    string Tenant,
    string TenantHeader,
    TimeSpan Timeout,
    int Limit);
/// <summary>
/// Immutable configuration for the Notify smoke check, sourced entirely from
/// NOTIFY_SMOKE_* environment variables via <see cref="FromEnvironment"/>.
/// </summary>
public sealed record NotifySmokeOptions(
    string RedisDsn,
    string RedisStream,
    IReadOnlyList<string> ExpectedKinds,
    TimeSpan Lookback,
    int StreamPageSize,
    int StreamMaxEntries,
    int RetryAttempts,
    TimeSpan RetryDelay,
    NotifyDeliveryOptions Delivery,
    TimeProvider TimeProvider)
{
    /// <summary>
    /// Builds options from environment variables read through
    /// <paramref name="getEnv"/> (injectable for tests). Throws
    /// <see cref="InvalidOperationException"/> when a required variable is
    /// missing or malformed; optional numeric variables fall back to defaults.
    /// </summary>
    public static NotifySmokeOptions FromEnvironment(Func<string, string?> getEnv)
    {
        // Required variables fail fast with a descriptive message.
        string RequireEnv(string name)
        {
            var value = getEnv(name);
            if (string.IsNullOrWhiteSpace(value))
            {
                throw new InvalidOperationException($"Environment variable '{name}' is required for Notify smoke validation.");
            }
            return value;
        }
        var redisDsn = RequireEnv("NOTIFY_SMOKE_REDIS_DSN");
        var redisStream = getEnv("NOTIFY_SMOKE_STREAM");
        if (string.IsNullOrWhiteSpace(redisStream))
        {
            // Default stream name when not overridden.
            redisStream = "stella.events";
        }
        var expectedKindsEnv = RequireEnv("NOTIFY_SMOKE_EXPECT_KINDS");
        // Comma-separated kinds, lower-cased and de-duplicated.
        var expectedKinds = expectedKindsEnv
            .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
            .Select(kind => kind.ToLowerInvariant())
            .Distinct(StringComparer.Ordinal)
            .ToArray();
        if (expectedKinds.Length == 0)
        {
            throw new InvalidOperationException("Expected at least one event kind in NOTIFY_SMOKE_EXPECT_KINDS.");
        }
        var lookbackMinutesEnv = RequireEnv("NOTIFY_SMOKE_LOOKBACK_MINUTES");
        if (!double.TryParse(lookbackMinutesEnv, NumberStyles.Any, CultureInfo.InvariantCulture, out var lookbackMinutes))
        {
            throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be numeric.");
        }
        if (lookbackMinutes <= 0)
        {
            throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be greater than zero.");
        }
        var streamPageSize = ParseInt(getEnv("NOTIFY_SMOKE_STREAM_PAGE_SIZE"), 500, min: 50);
        var streamMaxEntries = ParseInt(getEnv("NOTIFY_SMOKE_STREAM_MAX_ENTRIES"), 2000, min: streamPageSize);
        // Not redundant: ParseInt returns the raw fallback (2000) without
        // clamping when the variable is missing, so re-clamp against page size.
        if (streamMaxEntries < streamPageSize)
        {
            streamMaxEntries = streamPageSize;
        }
        var retryAttempts = ParseInt(getEnv("NOTIFY_SMOKE_RETRY_ATTEMPTS"), 3, min: 1, max: 10);
        var retryDelayMs = ParseInt(getEnv("NOTIFY_SMOKE_RETRY_DELAY_MS"), 250, min: 50, max: 2000);
        var baseUrlRaw = RequireEnv("NOTIFY_SMOKE_NOTIFY_BASEURL").TrimEnd('/');
        if (!Uri.TryCreate(baseUrlRaw, UriKind.Absolute, out var baseUri))
        {
            throw new InvalidOperationException("NOTIFY_SMOKE_NOTIFY_BASEURL must be an absolute URL.");
        }
        var deliveryToken = RequireEnv("NOTIFY_SMOKE_NOTIFY_TOKEN");
        var deliveryTenant = RequireEnv("NOTIFY_SMOKE_NOTIFY_TENANT");
        var tenantHeader = getEnv("NOTIFY_SMOKE_NOTIFY_TENANT_HEADER");
        if (string.IsNullOrWhiteSpace(tenantHeader))
        {
            tenantHeader = "X-StellaOps-Tenant";
        }
        var timeoutSeconds = ParseInt(getEnv("NOTIFY_SMOKE_NOTIFY_TIMEOUT_SECONDS"), 30, min: 5, max: 120);
        var limit = ParseInt(getEnv("NOTIFY_SMOKE_NOTIFY_LIMIT"), 200, min: 50, max: 2000);
        // Optional fixed clock makes the run deterministic in CI.
        var fixedTimeEnv = getEnv("NOTIFY_SMOKE_FIXED_TIME");
        var timeProvider = ResolveTimeProvider(fixedTimeEnv);
        return new NotifySmokeOptions(
            RedisDsn: redisDsn,
            RedisStream: redisStream,
            ExpectedKinds: expectedKinds,
            Lookback: TimeSpan.FromMinutes(lookbackMinutes),
            StreamPageSize: streamPageSize,
            StreamMaxEntries: streamMaxEntries,
            RetryAttempts: retryAttempts,
            RetryDelay: TimeSpan.FromMilliseconds(retryDelayMs),
            Delivery: new NotifyDeliveryOptions(
                BaseUri: baseUri,
                Token: deliveryToken,
                Tenant: deliveryTenant,
                TenantHeader: tenantHeader,
                Timeout: TimeSpan.FromSeconds(timeoutSeconds),
                Limit: limit),
            TimeProvider: timeProvider);
    }
    // Parses an optional integer env value; missing or non-numeric values
    // return 'fallback' as-is, while parsed values are clamped to [min, max].
    private static int ParseInt(string? value, int fallback, int min = 0, int max = int.MaxValue)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return fallback;
        }
        if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return fallback;
        }
        if (parsed < min)
        {
            return min;
        }
        return parsed > max ? max : parsed;
    }
    // Returns the system clock, or a FixedTimeProvider when
    // NOTIFY_SMOKE_FIXED_TIME contains a valid ISO-8601 timestamp (UTC assumed).
    private static TimeProvider ResolveTimeProvider(string? fixedTimeEnv)
    {
        if (string.IsNullOrWhiteSpace(fixedTimeEnv))
        {
            return TimeProvider.System;
        }
        if (!DateTimeOffset.TryParse(fixedTimeEnv, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var fixedTime))
        {
            throw new InvalidOperationException("NOTIFY_SMOKE_FIXED_TIME must be an ISO-8601 timestamp.");
        }
        return new FixedTimeProvider(fixedTime);
    }
}
/// <summary>Minimal projection of a Notify delivery: event kind plus optional status.</summary>
public sealed record NotifyDeliveryRecord(string Kind, string? Status);
/// <summary>
/// Executes the Notify smoke validation: verifies that recent Redis stream
/// events contain all expected kinds, then confirms the Notify deliveries API
/// reports non-failed deliveries for the same kinds within the lookback window.
/// Throws <see cref="InvalidOperationException"/> on any failed check.
/// </summary>
public sealed class NotifySmokeCheckRunner
{
    private readonly NotifySmokeOptions _options;
    // Info/error sinks; injected so tests can capture output.
    private readonly Action<string> _info;
    private readonly Action<string> _error;
    public NotifySmokeCheckRunner(NotifySmokeOptions options, Action<string>? info = null, Action<string>? error = null)
    {
        _options = options;
        // Null sinks become no-ops.
        _info = info ?? (_ => { });
        _error = error ?? (_ => { });
    }
    /// <summary>Runs both checks (Redis stream, then Notify deliveries API) in order.</summary>
    public async Task RunAsync(CancellationToken cancellationToken)
    {
        var now = _options.TimeProvider.GetUtcNow();
        var sinceThreshold = now - _options.Lookback;
        _info($"[INFO] Checking Redis stream '{_options.RedisStream}' for kinds [{string.Join(", ", _options.ExpectedKinds)}] within the last {_options.Lookback.TotalMinutes:F1} minutes.");
        var redisConfig = ConfigurationOptions.Parse(_options.RedisDsn);
        // Keep retry control in our own loop rather than aborting on first connect failure.
        redisConfig.AbortOnConnectFail = false;
        await using var redisConnection = await ConnectWithRetriesAsync(redisConfig, cancellationToken).ConfigureAwait(false);
        var database = redisConnection.GetDatabase();
        var recentEntries = await ReadRecentStreamEntriesAsync(database, _options.RedisStream, sinceThreshold, cancellationToken).ConfigureAwait(false);
        Ensure(recentEntries.Count > 0, $"No Redis events newer than {sinceThreshold:u} located in stream '{_options.RedisStream}'.");
        var missingKinds = FindMissingKinds(recentEntries, _options.ExpectedKinds);
        Ensure(missingKinds.Count == 0, $"Missing expected Redis events for kinds: {string.Join(", ", missingKinds)}");
        _info("[INFO] Redis event stream contains the expected scanner events.");
        var deliveriesUrl = BuildDeliveriesUrl(_options.Delivery.BaseUri, sinceThreshold, _options.Delivery.Limit);
        _info($"[INFO] Querying Notify deliveries via {deliveriesUrl}.");
        using var httpClient = BuildHttpClient(_options.Delivery);
        using var response = await GetWithRetriesAsync(httpClient, deliveriesUrl, cancellationToken).ConfigureAwait(false);
        if (!response.IsSuccessStatusCode)
        {
            var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            throw new InvalidOperationException($"Notify deliveries request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}");
        }
        var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        Ensure(!string.IsNullOrWhiteSpace(json), "Notify deliveries response body was empty.");
        var deliveries = ParseDeliveries(json);
        Ensure(deliveries.Count > 0, "Notify deliveries response did not return any records.");
        var missingDeliveryKinds = FindMissingDeliveryKinds(deliveries, _options.ExpectedKinds);
        Ensure(missingDeliveryKinds.Count == 0, $"Notify deliveries missing successful records for kinds: {string.Join(", ", missingDeliveryKinds)}");
        _info("[INFO] Notify deliveries include the expected scanner events.");
    }
    /// <summary>
    /// Parses the deliveries payload, accepting either a bare JSON array or an
    /// object with an "items" array. Records without a "kind" are skipped.
    /// </summary>
    internal static IReadOnlyList<NotifyDeliveryRecord> ParseDeliveries(string json)
    {
        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;
        IEnumerable<JsonElement> EnumerateDeliveries(JsonElement element)
        {
            return element.ValueKind switch
            {
                JsonValueKind.Array => element.EnumerateArray(),
                JsonValueKind.Object when element.TryGetProperty("items", out var items) && items.ValueKind == JsonValueKind.Array => items.EnumerateArray(),
                _ => throw new InvalidOperationException("Notify deliveries response was not an array or did not contain an 'items' collection.")
            };
        }
        var deliveries = new List<NotifyDeliveryRecord>();
        foreach (var delivery in EnumerateDeliveries(root))
        {
            var kind = delivery.TryGetProperty("kind", out var kindProperty) ? kindProperty.GetString() : null;
            if (string.IsNullOrWhiteSpace(kind))
            {
                continue;
            }
            var status = delivery.TryGetProperty("status", out var statusProperty) ? statusProperty.GetString() : null;
            deliveries.Add(new NotifyDeliveryRecord(kind, status));
        }
        return deliveries;
    }
    // Returns the expected kinds that have no matching delivery. A delivery
    // counts as a match when its kind matches (case-insensitive) and its
    // status is anything other than "failed" — a null/absent status is treated
    // as success.
    internal static IReadOnlyList<string> FindMissingDeliveryKinds(IReadOnlyList<NotifyDeliveryRecord> deliveries, IReadOnlyList<string> expectedKinds)
    {
        var missingKinds = new List<string>();
        foreach (var kind in expectedKinds)
        {
            var found = deliveries.Any(delivery =>
                string.Equals(delivery.Kind, kind, StringComparison.OrdinalIgnoreCase) &&
                !string.Equals(delivery.Status, "failed", StringComparison.OrdinalIgnoreCase));
            if (!found)
            {
                missingKinds.Add(kind);
            }
        }
        return missingKinds;
    }
    // Returns expected kinds with no matching entry among the recent stream
    // entries, matching on the entry's "kind" field (case-insensitive).
    internal static IReadOnlyList<string> FindMissingKinds(IReadOnlyList<StreamEntry> entries, IReadOnlyList<string> expectedKinds)
    {
        var missingKinds = new List<string>();
        foreach (var kind in expectedKinds)
        {
            var match = entries.FirstOrDefault(entry =>
            {
                var entryKind = GetField(entry, "kind");
                return entryKind is not null && string.Equals(entryKind, kind, StringComparison.OrdinalIgnoreCase);
            });
            // StreamEntry is a struct, so FirstOrDefault yields default on no match.
            if (match.Equals(default(StreamEntry)))
            {
                missingKinds.Add(kind);
            }
        }
        return missingKinds;
    }
    // Pages backwards from the newest stream entry, collecting entries whose
    // id timestamp is within the lookback window. Stops when an entry older
    // than the threshold is seen (ids are time-ordered) or the scan cap is hit.
    private async Task<IReadOnlyList<StreamEntry>> ReadRecentStreamEntriesAsync(IDatabase database, string stream, DateTimeOffset sinceThreshold, CancellationToken cancellationToken)
    {
        var recentEntries = new List<StreamEntry>();
        var scannedEntries = 0;
        RedisValue maxId = "+";
        var reachedThreshold = false;
        while (scannedEntries < _options.StreamMaxEntries && !reachedThreshold)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var batchSize = Math.Min(_options.StreamPageSize, _options.StreamMaxEntries - scannedEntries);
            var batch = await ReadStreamBatchAsync(database, stream, maxId, batchSize, cancellationToken).ConfigureAwait(false);
            if (batch.Length == 0)
            {
                break;
            }
            foreach (var entry in batch)
            {
                scannedEntries++;
                if (TryGetStreamTimestamp(entry, out var entryTimestamp))
                {
                    if (entryTimestamp >= sinceThreshold)
                    {
                        recentEntries.Add(entry);
                    }
                    else
                    {
                        reachedThreshold = true;
                        break;
                    }
                }
                else
                {
                    _error($"[WARN] Unable to parse stream entry id '{entry.Id}'.");
                }
            }
            // '(' prefix makes the next page's upper bound exclusive so the
            // last entry of this batch is not re-read.
            maxId = $"({batch[^1].Id}";
        }
        if (scannedEntries >= _options.StreamMaxEntries && !reachedThreshold)
        {
            _error($"[WARN] Reached stream scan limit ({_options.StreamMaxEntries}) before lookback threshold {sinceThreshold:u}.");
        }
        return recentEntries;
    }
    // Reads one descending page of the stream, retrying transient failures;
    // the final attempt outside the loop lets the last exception propagate.
    private async Task<StreamEntry[]> ReadStreamBatchAsync(IDatabase database, string stream, RedisValue maxId, int batchSize, CancellationToken cancellationToken)
    {
        for (var attempt = 1; attempt <= _options.RetryAttempts; attempt++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                return await database.StreamRangeAsync(stream, "-", maxId, batchSize, Order.Descending).ConfigureAwait(false);
            }
            catch (Exception ex) when (attempt < _options.RetryAttempts)
            {
                _error($"[WARN] Redis stream range attempt {attempt} failed: {ex.Message}");
                await Task.Delay(_options.RetryDelay, cancellationToken).ConfigureAwait(false);
            }
        }
        return await database.StreamRangeAsync(stream, "-", maxId, batchSize, Order.Descending).ConfigureAwait(false);
    }
    // Extracts the entry timestamp from its id, whose prefix before '-' is the
    // Unix epoch milliseconds portion of a Redis stream id.
    internal static bool TryGetStreamTimestamp(StreamEntry entry, out DateTimeOffset timestamp)
    {
        var id = entry.Id.ToString();
        var dash = id.IndexOf('-', StringComparison.Ordinal);
        if (dash <= 0)
        {
            timestamp = default;
            return false;
        }
        if (!long.TryParse(id[..dash], NumberStyles.Integer, CultureInfo.InvariantCulture, out var millis))
        {
            timestamp = default;
            return false;
        }
        timestamp = DateTimeOffset.FromUnixTimeMilliseconds(millis);
        return true;
    }
    // Returns the named field's value from a stream entry (case-insensitive), or null when absent.
    private static string? GetField(StreamEntry entry, string fieldName)
    {
        foreach (var pair in entry.Values)
        {
            if (string.Equals(pair.Name, fieldName, StringComparison.OrdinalIgnoreCase))
            {
                return pair.Value.ToString();
            }
        }
        return null;
    }
    // Connects to Redis with bounded retries; the final attempt outside the
    // loop lets the last exception propagate to the caller.
    private async Task<ConnectionMultiplexer> ConnectWithRetriesAsync(ConfigurationOptions options, CancellationToken cancellationToken)
    {
        for (var attempt = 1; attempt <= _options.RetryAttempts; attempt++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                return await ConnectionMultiplexer.ConnectAsync(options).ConfigureAwait(false);
            }
            catch (Exception ex) when (attempt < _options.RetryAttempts)
            {
                _error($"[WARN] Redis connection attempt {attempt} failed: {ex.Message}");
                await Task.Delay(_options.RetryDelay, cancellationToken).ConfigureAwait(false);
            }
        }
        return await ConnectionMultiplexer.ConnectAsync(options).ConfigureAwait(false);
    }
    // Builds a one-shot client with bearer auth, JSON accept, and the tenant header.
    private HttpClient BuildHttpClient(NotifyDeliveryOptions delivery)
    {
        var httpClient = new HttpClient
        {
            Timeout = delivery.Timeout,
        };
        httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", delivery.Token);
        httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
        httpClient.DefaultRequestHeaders.Add(delivery.TenantHeader, delivery.Tenant);
        return httpClient;
    }
    // GETs with retries on timeout/429/5xx; the loop returns on the final
    // attempt, so the trailing call is defensive only.
    private async Task<HttpResponseMessage> GetWithRetriesAsync(HttpClient httpClient, Uri url, CancellationToken cancellationToken)
    {
        for (var attempt = 1; attempt <= _options.RetryAttempts; attempt++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var response = await httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false);
            if (!ShouldRetry(response.StatusCode) || attempt == _options.RetryAttempts)
            {
                return response;
            }
            _error($"[WARN] Notify deliveries attempt {attempt} returned {(int)response.StatusCode}. Retrying after {_options.RetryDelay.TotalMilliseconds:F0} ms.");
            response.Dispose();
            await Task.Delay(_options.RetryDelay, cancellationToken).ConfigureAwait(false);
        }
        return await httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false);
    }
    // Transient statuses worth retrying: 408, 429, and any 5xx.
    private static bool ShouldRetry(HttpStatusCode statusCode)
        => statusCode == HttpStatusCode.RequestTimeout
           || statusCode == (HttpStatusCode)429
           || (int)statusCode >= 500;
    // Composes /api/v1/deliveries?since=<ISO-8601>&limit=<n> on the configured base.
    private static Uri BuildDeliveriesUrl(Uri baseUri, DateTimeOffset sinceThreshold, int limit)
    {
        var sinceQuery = Uri.EscapeDataString(sinceThreshold.ToString("O", CultureInfo.InvariantCulture));
        var builder = new UriBuilder(baseUri)
        {
            Path = "/api/v1/deliveries",
            Query = $"since={sinceQuery}&limit={limit}"
        };
        return builder.Uri;
    }
    // Assertion helper: failed checks surface as InvalidOperationException.
    private static void Ensure(bool condition, string message)
    {
        if (!condition)
        {
            throw new InvalidOperationException(message);
        }
    }
}
/// <summary>
/// TimeProvider that always reports the same wall-clock instant; used to make
/// smoke runs deterministic when NOTIFY_SMOKE_FIXED_TIME is set.
/// </summary>
internal sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _fixedTime;
    // Timestamp expressed in DateTime ticks (100 ns units).
    private readonly long _timestamp;

    public FixedTimeProvider(DateTimeOffset fixedTime)
    {
        _fixedTime = fixedTime;
        _timestamp = fixedTime.UtcTicks;
    }

    // GetTimestamp returns DateTime ticks, so advertise the matching frequency
    // instead of inheriting the base default (Stopwatch.Frequency); otherwise
    // GetElapsedTime would misinterpret the tick scale.
    public override long TimestampFrequency => TimeSpan.TicksPerSecond;

    public override DateTimeOffset GetUtcNow() => _fixedTime;

    public override long GetTimestamp() => _timestamp;
}

View File

@@ -1,198 +1,3 @@
using System.Globalization;
using System.Net.Http.Headers;
using System.Linq;
using System.Text.Json;
using StackExchange.Redis;
using StellaOps.Tools.NotifySmokeCheck;
// Reads a mandatory environment variable, failing fast with a descriptive
// error when it is absent or whitespace-only.
static string RequireEnv(string name)
{
    var raw = Environment.GetEnvironmentVariable(name);
    return string.IsNullOrWhiteSpace(raw)
        ? throw new InvalidOperationException($"Environment variable '{name}' is required for Notify smoke validation.")
        : raw;
}
// Returns the value of the named field in a stream entry (case-insensitive
// name match), or null when the field is absent.
static string? GetField(StreamEntry entry, string fieldName)
{
    foreach (var field in entry.Values)
    {
        if (string.Equals(field.Name, fieldName, StringComparison.OrdinalIgnoreCase))
        {
            return field.Value.ToString();
        }
    }

    return null;
}
// Assertion helper: no-op when the condition holds, otherwise raises an
// InvalidOperationException carrying the supplied message.
static void Ensure(bool condition, string message)
{
    if (condition)
    {
        return;
    }

    throw new InvalidOperationException(message);
}
// --- Legacy inline smoke check (this diff replaces it with NotifySmokeCheckApp/Runner) ---
// Step 1: gather configuration from NOTIFY_SMOKE_* environment variables.
var redisDsn = RequireEnv("NOTIFY_SMOKE_REDIS_DSN");
var redisStream = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_STREAM");
if (string.IsNullOrWhiteSpace(redisStream))
{
    redisStream = "stella.events";
}
var expectedKindsEnv = RequireEnv("NOTIFY_SMOKE_EXPECT_KINDS");
var expectedKinds = expectedKindsEnv
    .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
    .Select(kind => kind.ToLowerInvariant())
    .Distinct()
    .ToArray();
Ensure(expectedKinds.Length > 0, "Expected at least one event kind in NOTIFY_SMOKE_EXPECT_KINDS.");
var lookbackMinutesEnv = RequireEnv("NOTIFY_SMOKE_LOOKBACK_MINUTES");
if (!double.TryParse(lookbackMinutesEnv, NumberStyles.Any, CultureInfo.InvariantCulture, out var lookbackMinutes))
{
    throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be numeric.");
}
Ensure(lookbackMinutes > 0, "NOTIFY_SMOKE_LOOKBACK_MINUTES must be greater than zero.");
var now = DateTimeOffset.UtcNow;
var sinceThreshold = now - TimeSpan.FromMinutes(Math.Max(1, lookbackMinutes));
Console.WriteLine($" Checking Redis stream '{redisStream}' for kinds [{string.Join(", ", expectedKinds)}] within the last {lookbackMinutes:F1} minutes.");
// Step 2: read the newest 200 stream entries and keep those inside the lookback window.
var redisConfig = ConfigurationOptions.Parse(redisDsn);
redisConfig.AbortOnConnectFail = false;
await using var redisConnection = await ConnectionMultiplexer.ConnectAsync(redisConfig);
var database = redisConnection.GetDatabase();
var streamEntries = await database.StreamRangeAsync(redisStream, "-", "+", count: 200);
if (streamEntries.Length > 1)
{
    Array.Reverse(streamEntries);
}
Ensure(streamEntries.Length > 0, $"Redis stream '{redisStream}' is empty.");
var recentEntries = new List<StreamEntry>();
foreach (var entry in streamEntries)
{
    // Entries carry their own "ts" field; entries without a parseable timestamp are skipped.
    var timestampText = GetField(entry, "ts");
    if (timestampText is null)
    {
        continue;
    }
    if (!DateTimeOffset.TryParse(timestampText, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var entryTimestamp))
    {
        continue;
    }
    if (entryTimestamp >= sinceThreshold)
    {
        recentEntries.Add(entry);
    }
}
Ensure(recentEntries.Count > 0, $"No Redis events newer than {sinceThreshold:u} located in stream '{redisStream}'.");
// Step 3: every expected kind must appear among the recent entries.
var missingKinds = new List<string>();
foreach (var kind in expectedKinds)
{
    var match = recentEntries.FirstOrDefault(entry =>
    {
        var entryKind = GetField(entry, "kind")?.ToLowerInvariant();
        return entryKind == kind;
    });
    // StreamEntry is a struct, so FirstOrDefault yields default on no match.
    if (match.Equals(default(StreamEntry)))
    {
        missingKinds.Add(kind);
    }
}
Ensure(missingKinds.Count == 0, $"Missing expected Redis events for kinds: {string.Join(", ", missingKinds)}");
Console.WriteLine("✅ Redis event stream contains the expected scanner events.");
// Step 4: query the Notify deliveries API for the same window.
var notifyBaseUrl = RequireEnv("NOTIFY_SMOKE_NOTIFY_BASEURL").TrimEnd('/');
var notifyToken = RequireEnv("NOTIFY_SMOKE_NOTIFY_TOKEN");
var notifyTenant = RequireEnv("NOTIFY_SMOKE_NOTIFY_TENANT");
var notifyTenantHeader = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TENANT_HEADER");
if (string.IsNullOrWhiteSpace(notifyTenantHeader))
{
    notifyTenantHeader = "X-StellaOps-Tenant";
}
var notifyTimeoutSeconds = 30;
var notifyTimeoutEnv = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TIMEOUT_SECONDS");
if (!string.IsNullOrWhiteSpace(notifyTimeoutEnv) && int.TryParse(notifyTimeoutEnv, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedTimeout))
{
    notifyTimeoutSeconds = Math.Max(5, parsedTimeout);
}
using var httpClient = new HttpClient
{
    Timeout = TimeSpan.FromSeconds(notifyTimeoutSeconds),
};
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", notifyToken);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
httpClient.DefaultRequestHeaders.Add(notifyTenantHeader, notifyTenant);
var sinceQuery = Uri.EscapeDataString(sinceThreshold.ToString("O", CultureInfo.InvariantCulture));
var deliveriesUrl = $"{notifyBaseUrl}/api/v1/deliveries?since={sinceQuery}&limit=200";
Console.WriteLine($" Querying Notify deliveries via {deliveriesUrl}.");
using var response = await httpClient.GetAsync(deliveriesUrl);
if (!response.IsSuccessStatusCode)
{
    var body = await response.Content.ReadAsStringAsync();
    throw new InvalidOperationException($"Notify deliveries request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}");
}
var json = await response.Content.ReadAsStringAsync();
if (string.IsNullOrWhiteSpace(json))
{
    throw new InvalidOperationException("Notify deliveries response body was empty.");
}
using var document = JsonDocument.Parse(json);
var root = document.RootElement;
// Accepts either a bare JSON array or an object wrapping an "items" array.
IEnumerable<JsonElement> EnumerateDeliveries(JsonElement element)
{
    return element.ValueKind switch
    {
        JsonValueKind.Array => element.EnumerateArray(),
        JsonValueKind.Object when element.TryGetProperty("items", out var items) && items.ValueKind == JsonValueKind.Array => items.EnumerateArray(),
        _ => throw new InvalidOperationException("Notify deliveries response was not an array or did not contain an 'items' collection.")
    };
}
var deliveries = EnumerateDeliveries(root).ToArray();
Ensure(deliveries.Length > 0, "Notify deliveries response did not return any records.");
// Step 5: every expected kind must have at least one non-failed delivery record.
// Unlike the new runner, this version requires the "status" property to be present.
var missingDeliveryKinds = new List<string>();
foreach (var kind in expectedKinds)
{
    var found = deliveries.Any(delivery =>
        delivery.TryGetProperty("kind", out var kindProperty) &&
        kindProperty.GetString()?.Equals(kind, StringComparison.OrdinalIgnoreCase) == true &&
        delivery.TryGetProperty("status", out var statusProperty) &&
        !string.Equals(statusProperty.GetString(), "failed", StringComparison.OrdinalIgnoreCase));
    if (!found)
    {
        missingDeliveryKinds.Add(kind);
    }
}
Ensure(missingDeliveryKinds.Count == 0, $"Notify deliveries missing successful records for kinds: {string.Join(", ", missingDeliveryKinds)}");
Console.WriteLine("✅ Notify deliveries include the expected scanner events.");
Console.WriteLine("🎉 Notify smoke validation completed successfully.");
// NOTE(review): line appended by the refactor diff — the new 3-line file
// delegates to the shared entry point instead of the statements above.
return await NotifySmokeCheckApp.RunAsync(args);

View File

@@ -8,7 +8,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Tools\StellaOps.Policy.Tools.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,56 +1,3 @@
using StellaOps.Policy;
using StellaOps.Policy.Tools;
if (args.Length == 0)
{
Console.Error.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]");
Console.Error.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies");
return 64; // EX_USAGE
}
var inputs = new List<string>();
var strict = false;
var outputJson = false;
foreach (var arg in args)
{
switch (arg)
{
case "--strict":
case "-s":
strict = true;
break;
case "--json":
case "-j":
outputJson = true;
break;
case "--help":
case "-h":
case "-?":
Console.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]");
Console.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies");
return 0;
default:
inputs.Add(arg);
break;
}
}
if (inputs.Count == 0)
{
Console.Error.WriteLine("No input files or directories provided.");
return 64; // EX_USAGE
}
var options = new PolicyValidationCliOptions
{
Inputs = inputs,
Strict = strict,
OutputJson = outputJson,
};
var cli = new PolicyValidationCli();
var exitCode = await cli.RunAsync(options, CancellationToken.None);
return exitCode;
return await PolicyDslValidatorApp.RunAsync(args);

View File

@@ -9,14 +9,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="NJsonSchema" />
<PackageReference Include="NJsonSchema.NewtonsoftJson" />
<PackageReference Include="NJsonSchema.CodeGeneration.CSharp" />
<PackageReference Include="Newtonsoft.Json" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Scheduler\__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Tools\StellaOps.Policy.Tools.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,47 +1,3 @@
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
using NJsonSchema;
using NJsonSchema.Generation;
using Newtonsoft.Json;
using StellaOps.Scheduler.Models;
using StellaOps.Policy.Tools;
var output = args.Length switch
{
0 => Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "docs", "schemas")),
1 => Path.GetFullPath(args[0]),
_ => throw new ArgumentException("Usage: dotnet run --project src/Tools/PolicySchemaExporter -- [outputDirectory]")
};
Directory.CreateDirectory(output);
var generatorSettings = new NJsonSchema.NewtonsoftJson.Generation.NewtonsoftJsonSchemaGeneratorSettings
{
SchemaType = SchemaType.JsonSchema,
DefaultReferenceTypeNullHandling = ReferenceTypeNullHandling.NotNull,
SerializerSettings = new JsonSerializerSettings
{
ContractResolver = new Newtonsoft.Json.Serialization.CamelCasePropertyNamesContractResolver(),
NullValueHandling = NullValueHandling.Ignore,
},
};
var generator = new JsonSchemaGenerator(generatorSettings);
var exports = ImmutableArray.Create(
(FileName: "policy-run-request.schema.json", Type: typeof(PolicyRunRequest)),
(FileName: "policy-run-status.schema.json", Type: typeof(PolicyRunStatus)),
(FileName: "policy-diff-summary.schema.json", Type: typeof(PolicyDiffSummary)),
(FileName: "policy-explain-trace.schema.json", Type: typeof(PolicyExplainTrace))
);
foreach (var export in exports)
{
var schema = generator.Generate(export.Type);
schema.Title = export.Type.Name;
schema.AllowAdditionalProperties = false;
var outputPath = Path.Combine(output, export.FileName);
await File.WriteAllTextAsync(outputPath, schema.ToJson(Formatting.Indented) + Environment.NewLine);
Console.WriteLine($"Wrote {outputPath}");
}
return await PolicySchemaExporterApp.RunAsync(args);

View File

@@ -8,9 +8,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj" />
<PackageReference Include="Microsoft.Extensions.Logging" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Tools\StellaOps.Policy.Tools.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,291 +1,3 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Policy;
using StellaOps.Policy.Tools;
var scenarioRoot = "samples/policy/simulations";
string? outputDir = null;
for (var i = 0; i < args.Length; i++)
{
var arg = args[i];
switch (arg)
{
case "--scenario-root":
case "-r":
if (i + 1 >= args.Length)
{
Console.Error.WriteLine("Missing value for --scenario-root.");
return 64;
}
scenarioRoot = args[++i];
break;
case "--output":
case "-o":
if (i + 1 >= args.Length)
{
Console.Error.WriteLine("Missing value for --output.");
return 64;
}
outputDir = args[++i];
break;
case "--help":
case "-h":
case "-?":
PrintUsage();
return 0;
default:
Console.Error.WriteLine($"Unknown argument '{arg}'.");
PrintUsage();
return 64;
}
}
if (!Directory.Exists(scenarioRoot))
{
Console.Error.WriteLine($"Scenario root '{scenarioRoot}' does not exist.");
return 66;
}
var scenarioFiles = Directory.GetFiles(scenarioRoot, "scenario.json", SearchOption.AllDirectories);
if (scenarioFiles.Length == 0)
{
Console.Error.WriteLine($"No scenario.json files found under '{scenarioRoot}'.");
return 0;
}
var loggerFactory = NullLoggerFactory.Instance;
var snapshotStore = new PolicySnapshotStore(
new NullPolicySnapshotRepository(),
new NullPolicyAuditRepository(),
TimeProvider.System,
loggerFactory.CreateLogger<PolicySnapshotStore>());
var previewService = new PolicyPreviewService(snapshotStore, loggerFactory.CreateLogger<PolicyPreviewService>());
var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
};
var summary = new List<ScenarioResult>();
var success = true;
foreach (var scenarioFile in scenarioFiles.OrderBy(static f => f, StringComparer.OrdinalIgnoreCase))
{
var scenarioText = await File.ReadAllTextAsync(scenarioFile);
var scenario = JsonSerializer.Deserialize<PolicySimulationScenario>(scenarioText, serializerOptions);
if (scenario is null)
{
Console.Error.WriteLine($"Failed to deserialize scenario '{scenarioFile}'.");
success = false;
continue;
}
var repoRoot = Directory.GetCurrentDirectory();
var policyPath = Path.Combine(repoRoot, scenario.PolicyPath);
if (!File.Exists(policyPath))
{
Console.Error.WriteLine($"Policy file '{scenario.PolicyPath}' referenced by scenario '{scenario.Name}' does not exist.");
success = false;
continue;
}
var policyContent = await File.ReadAllTextAsync(policyPath);
var policyFormat = PolicySchema.DetectFormat(policyPath);
var findings = scenario.Findings.Select(ToPolicyFinding).ToImmutableArray();
var baseline = scenario.Baseline?.Select(ToPolicyVerdict).ToImmutableArray() ?? ImmutableArray<PolicyVerdict>.Empty;
var request = new PolicyPreviewRequest(
ImageDigest: $"sha256:simulation-{scenario.Name}",
Findings: findings,
BaselineVerdicts: baseline,
SnapshotOverride: null,
ProposedPolicy: new PolicySnapshotContent(
Content: policyContent,
Format: policyFormat,
Actor: "ci",
Source: "ci/simulation-smoke",
Description: $"CI simulation for scenario '{scenario.Name}'"));
var response = await previewService.PreviewAsync(request, CancellationToken.None);
var scenarioResult = EvaluateScenario(scenario, response);
summary.Add(scenarioResult);
if (!scenarioResult.Success)
{
success = false;
}
}
if (outputDir is not null)
{
Directory.CreateDirectory(outputDir);
var summaryPath = Path.Combine(outputDir, "policy-simulation-summary.json");
await File.WriteAllTextAsync(summaryPath, JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true }));
}
return success ? 0 : 1;
static void PrintUsage()
{
Console.WriteLine("Usage: policy-simulation-smoke [--scenario-root <path>] [--output <dir>]");
Console.WriteLine("Example: policy-simulation-smoke --scenario-root samples/policy/simulations --output artifacts/policy-simulations");
}
static PolicyFinding ToPolicyFinding(ScenarioFinding finding)
{
var tags = finding.Tags is null ? ImmutableArray<string>.Empty : ImmutableArray.CreateRange(finding.Tags);
var severity = Enum.Parse<PolicySeverity>(finding.Severity, ignoreCase: true);
return new PolicyFinding(
finding.FindingId,
severity,
finding.Environment,
finding.Source,
finding.Vendor,
finding.License,
finding.Image,
finding.Repository,
finding.Package,
finding.Purl,
finding.Cve,
finding.Path,
finding.LayerDigest,
tags);
}
static PolicyVerdict ToPolicyVerdict(ScenarioBaseline baseline)
{
var status = Enum.Parse<PolicyVerdictStatus>(baseline.Status, ignoreCase: true);
var inputs = baseline.Inputs?.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableDictionary<string, double>.Empty;
return new PolicyVerdict(
baseline.FindingId,
status,
RuleName: baseline.RuleName,
RuleAction: baseline.RuleAction,
Notes: baseline.Notes,
Score: baseline.Score,
ConfigVersion: baseline.ConfigVersion ?? PolicyScoringConfig.Default.Version,
Inputs: inputs,
QuietedBy: null,
Quiet: false,
UnknownConfidence: null,
ConfidenceBand: null,
UnknownAgeDays: null,
SourceTrust: null,
Reachability: null);
}
static ScenarioResult EvaluateScenario(PolicySimulationScenario scenario, PolicyPreviewResponse response)
{
var result = new ScenarioResult(scenario.Name);
if (!response.Success)
{
result.Failures.Add("Preview failed.");
return result with { Success = false, ChangedCount = response.ChangedCount };
}
var diffs = response.Diffs.ToDictionary(diff => diff.Projected.FindingId, StringComparer.OrdinalIgnoreCase);
foreach (var expected in scenario.ExpectedDiffs)
{
if (!diffs.TryGetValue(expected.FindingId, out var diff))
{
result.Failures.Add($"Expected finding '{expected.FindingId}' missing from diff.");
continue;
}
var projectedStatus = diff.Projected.Status.ToString();
result.ActualStatuses[expected.FindingId] = projectedStatus;
if (!string.Equals(projectedStatus, expected.Status, StringComparison.OrdinalIgnoreCase))
{
result.Failures.Add($"Finding '{expected.FindingId}' expected status '{expected.Status}' but was '{projectedStatus}'.");
}
}
foreach (var diff in diffs.Values)
{
if (!result.ActualStatuses.ContainsKey(diff.Projected.FindingId))
{
result.ActualStatuses[diff.Projected.FindingId] = diff.Projected.Status.ToString();
}
}
var success = result.Failures.Count == 0;
return result with
{
Success = success,
ChangedCount = response.ChangedCount
};
}
internal sealed record PolicySimulationScenario
{
public string Name { get; init; } = "scenario";
public string PolicyPath { get; init; } = string.Empty;
public List<ScenarioFinding> Findings { get; init; } = new();
public List<ScenarioExpectedDiff> ExpectedDiffs { get; init; } = new();
public List<ScenarioBaseline>? Baseline { get; init; }
}
internal sealed record ScenarioFinding
{
public string FindingId { get; init; } = string.Empty;
public string Severity { get; init; } = "Low";
public string? Environment { get; init; }
public string? Source { get; init; }
public string? Vendor { get; init; }
public string? License { get; init; }
public string? Image { get; init; }
public string? Repository { get; init; }
public string? Package { get; init; }
public string? Purl { get; init; }
public string? Cve { get; init; }
public string? Path { get; init; }
public string? LayerDigest { get; init; }
public string[]? Tags { get; init; }
}
internal sealed record ScenarioExpectedDiff
{
public string FindingId { get; init; } = string.Empty;
public string Status { get; init; } = "Pass";
}
internal sealed record ScenarioBaseline
{
public string FindingId { get; init; } = string.Empty;
public string Status { get; init; } = "Pass";
public string? RuleName { get; init; }
public string? RuleAction { get; init; }
public string? Notes { get; init; }
public double Score { get; init; }
public string? ConfigVersion { get; init; }
public Dictionary<string, double>? Inputs { get; init; }
}
internal sealed record ScenarioResult(string ScenarioName)
{
public bool Success { get; init; } = true;
public int ChangedCount { get; init; }
public List<string> Failures { get; } = new();
public Dictionary<string, string> ActualStatuses { get; } = new(StringComparer.OrdinalIgnoreCase);
}
internal sealed class NullPolicySnapshotRepository : IPolicySnapshotRepository
{
public Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default) => Task.CompletedTask;
public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default) => Task.FromResult<PolicySnapshot?>(null);
public Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
=> Task.FromResult<IReadOnlyList<PolicySnapshot>>(Array.Empty<PolicySnapshot>());
}
internal sealed class NullPolicyAuditRepository : IPolicyAuditRepository
{
public Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default) => Task.CompletedTask;
public Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
=> Task.FromResult<IReadOnlyList<PolicyAuditEntry>>(Array.Empty<PolicyAuditEntry>());
}
return await PolicySimulationSmokeApp.RunAsync(args);

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("RustFsMigrator.Tests")]

View File

@@ -1,8 +1,10 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using Amazon;
using Amazon.Runtime;
using Amazon.S3;
using Amazon.S3.Model;
using System.Net.Http.Headers;
var options = MigrationOptions.Parse(args);
if (options is null)
@@ -36,6 +38,11 @@ if (!string.IsNullOrWhiteSpace(options.S3Region))
using var s3Client = CreateS3Client(options, s3Config);
using var httpClient = CreateRustFsClient(options);
using var cts = options.TimeoutSeconds > 0
? new CancellationTokenSource(TimeSpan.FromSeconds(options.TimeoutSeconds))
: null;
var cancellationToken = cts?.Token ?? CancellationToken.None;
var listRequest = new ListObjectsV2Request
{
BucketName = options.S3Bucket,
@@ -46,69 +53,52 @@ var listRequest = new ListObjectsV2Request
var migrated = 0;
var skipped = 0;
do
try
{
var response = await s3Client.ListObjectsV2Async(listRequest).ConfigureAwait(false);
foreach (var entry in response.S3Objects)
do
{
if (entry.Size == 0 && entry.Key.EndsWith('/'))
var response = await ExecuteWithRetriesAsync<ListObjectsV2Response>(
token => s3Client.ListObjectsV2Async(listRequest, token),
"ListObjectsV2",
options,
cancellationToken).ConfigureAwait(false);
foreach (var entry in response.S3Objects)
{
skipped++;
continue;
if (entry.Size == 0 && entry.Key.EndsWith("/", StringComparison.Ordinal))
{
skipped++;
continue;
}
Console.WriteLine($"Migrating {entry.Key} ({entry.Size} bytes)...");
if (options.DryRun)
{
migrated++;
continue;
}
try
{
await UploadObjectAsync(s3Client, httpClient, options, entry, cancellationToken).ConfigureAwait(false);
migrated++;
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
Console.Error.WriteLine($"Failed to upload {entry.Key}: {ex.Message}");
return 2;
}
}
Console.WriteLine($"Migrating {entry.Key} ({entry.Size} bytes)...");
if (options.DryRun)
{
migrated++;
continue;
}
using var getResponse = await s3Client.GetObjectAsync(new GetObjectRequest
{
BucketName = options.S3Bucket,
Key = entry.Key,
}).ConfigureAwait(false);
await using var memory = new MemoryStream();
await getResponse.ResponseStream.CopyToAsync(memory).ConfigureAwait(false);
memory.Position = 0;
using var request = new HttpRequestMessage(HttpMethod.Put, BuildRustFsUri(options, entry.Key))
{
Content = new ByteArrayContent(memory.ToArray()),
};
request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/octet-stream");
if (options.Immutable)
{
request.Headers.TryAddWithoutValidation("X-RustFS-Immutable", "true");
}
if (options.RetentionSeconds is { } retainSeconds)
{
request.Headers.TryAddWithoutValidation("X-RustFS-Retain-Seconds", retainSeconds.ToString());
}
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
{
request.Headers.TryAddWithoutValidation(options.RustFsApiKeyHeader!, options.RustFsApiKey!);
}
using var responseMessage = await httpClient.SendAsync(request).ConfigureAwait(false);
if (!responseMessage.IsSuccessStatusCode)
{
var error = await responseMessage.Content.ReadAsStringAsync().ConfigureAwait(false);
Console.Error.WriteLine($"Failed to upload {entry.Key}: {(int)responseMessage.StatusCode} {responseMessage.ReasonPhrase}\n{error}");
return 2;
}
migrated++;
}
listRequest.ContinuationToken = response.NextContinuationToken;
} while (!string.IsNullOrEmpty(listRequest.ContinuationToken));
listRequest.ContinuationToken = response.NextContinuationToken;
} while (!string.IsNullOrEmpty(listRequest.ContinuationToken));
}
catch (OperationCanceledException)
{
Console.Error.WriteLine("Migration canceled.");
return 3;
}
Console.WriteLine($"Migration complete. Migrated {migrated} objects. Skipped {skipped} directory markers.");
return 0;
@@ -140,18 +130,112 @@ static HttpClient CreateRustFsClient(MigrationOptions options)
return client;
}
static Uri BuildRustFsUri(MigrationOptions options, string key)
static async Task UploadObjectAsync(IAmazonS3 s3Client, HttpClient httpClient, MigrationOptions options, S3Object entry, CancellationToken cancellationToken)
{
var normalized = string.Join('/', key
.Split('/', StringSplitOptions.RemoveEmptyEntries)
.Select(Uri.EscapeDataString));
var builder = new UriBuilder(options.RustFsEndpoint)
await ExecuteWithRetriesAsync<object>(async token =>
{
Path = $"/api/v1/buckets/{Uri.EscapeDataString(options.RustFsBucket)}/objects/{normalized}",
using var getResponse = await s3Client.GetObjectAsync(new GetObjectRequest
{
BucketName = options.S3Bucket,
Key = entry.Key,
}, token).ConfigureAwait(false);
using var request = BuildRustFsRequest(options, entry.Key, getResponse);
using var responseMessage = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, token).ConfigureAwait(false);
if (!responseMessage.IsSuccessStatusCode)
{
var error = await responseMessage.Content.ReadAsStringAsync(token).ConfigureAwait(false);
if (ShouldRetry(responseMessage.StatusCode))
{
throw new RetryableException($"RustFS upload returned {(int)responseMessage.StatusCode} {responseMessage.ReasonPhrase}: {error}");
}
throw new InvalidOperationException($"RustFS upload returned {(int)responseMessage.StatusCode} {responseMessage.ReasonPhrase}: {error}");
}
return null!;
}, $"Upload {entry.Key}", options, cancellationToken).ConfigureAwait(false);
}
static HttpRequestMessage BuildRustFsRequest(MigrationOptions options, string key, GetObjectResponse getResponse)
{
var request = new HttpRequestMessage(HttpMethod.Put, RustFsMigratorPaths.BuildRustFsUri(options, key))
{
Content = new StreamContent(getResponse.ResponseStream),
};
return builder.Uri;
request.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/octet-stream");
if (getResponse.Headers.ContentLength > 0)
{
request.Content.Headers.ContentLength = getResponse.Headers.ContentLength;
}
if (options.Immutable)
{
request.Headers.TryAddWithoutValidation("X-RustFS-Immutable", "true");
}
if (options.RetentionSeconds is { } retainSeconds)
{
request.Headers.TryAddWithoutValidation("X-RustFS-Retain-Seconds", retainSeconds.ToString(CultureInfo.InvariantCulture));
}
if (!string.IsNullOrWhiteSpace(options.RustFsApiKeyHeader) && !string.IsNullOrWhiteSpace(options.RustFsApiKey))
{
request.Headers.TryAddWithoutValidation(options.RustFsApiKeyHeader!, options.RustFsApiKey!);
}
return request;
}
// Executes the given async action with retry semantics: up to options.RetryAttempts
// attempts, retrying only exceptions classified by ShouldRetryException, with
// exponential backoff (ComputeBackoffDelay) between failed attempts.
// 'operation' is only used to label warning messages on stderr.
static async Task<T> ExecuteWithRetriesAsync<T>(Func<CancellationToken, Task<T>> action, string operation, MigrationOptions options, CancellationToken cancellationToken)
{
    Exception? last = null;
    for (var attempt = 1; attempt <= options.RetryAttempts; attempt++)
    {
        cancellationToken.ThrowIfCancellationRequested();
        try
        {
            return await action(cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ShouldRetryException(ex) && attempt < options.RetryAttempts)
        {
            // Retryable failure on a non-final attempt: warn and back off before retrying.
            // On the final attempt (or for non-retryable exceptions) the filter is false,
            // so the exception propagates to the caller unchanged.
            last = ex;
            Console.Error.WriteLine($"[WARN] {operation} attempt {attempt} failed: {ex.Message}");
            await Task.Delay(ComputeBackoffDelay(attempt, options.RetryDelayMs), cancellationToken).ConfigureAwait(false);
        }
    }
    // NOTE(review): with RetryAttempts >= 1 (enforced by option parsing) the loop
    // always returns or throws on the final attempt, so this tail looks unreachable;
    // also, `throw last;` would reset the captured stack trace. Confirm whether this
    // fallback is intentional defensive code.
    if (last is not null)
    {
        throw last;
    }
    return await action(cancellationToken).ConfigureAwait(false);
}
// Computes the exponential-backoff delay for a 1-based retry attempt:
// retryDelayMs * 2^(attempt - 1), clamped to at most 5 seconds.
// Attempt values below 1 are treated like the first attempt (exponent 0).
static TimeSpan ComputeBackoffDelay(int attempt, int retryDelayMs)
{
    var exponent = Math.Max(0, attempt - 1);
    var scaledMs = retryDelayMs * Math.Pow(2, exponent);
    return TimeSpan.FromMilliseconds(Math.Min(scaledMs, 5000));
}
// Classifies exceptions that represent transient faults worth retrying:
// explicit RetryableException markers, HTTP transport errors, S3 SDK errors,
// and stream/IO failures.
static bool ShouldRetryException(Exception ex)
{
    return ex is RetryableException
        || ex is HttpRequestException
        || ex is AmazonS3Exception
        || ex is IOException;
}
// Transient HTTP statuses worth retrying: 408 Request Timeout,
// 429 Too Many Requests, and any 5xx server error.
static bool ShouldRetry(HttpStatusCode statusCode)
{
    var code = (int)statusCode;
    return code is 408 or 429 or >= 500;
}
// Marker exception used to flag a transient RustFS failure (e.g. a retryable
// HTTP status) so that ShouldRetryException/ExecuteWithRetriesAsync retry it.
internal sealed class RetryableException : Exception
{
    public RetryableException(string message) : base(message)
    {
    }
}
internal sealed record MigrationOptions
@@ -192,6 +276,15 @@ internal sealed record MigrationOptions
public bool DryRun { get; init; }
= false;
public int RetryAttempts { get; init; }
= 3;
public int RetryDelayMs { get; init; }
= 250;
public int TimeoutSeconds { get; init; }
= 0;
public static MigrationOptions? Parse(string[] args)
{
var builder = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase);
@@ -202,7 +295,8 @@ internal sealed record MigrationOptions
if (key.StartsWith("--", StringComparison.OrdinalIgnoreCase))
{
var normalized = key[2..];
if (string.Equals(normalized, "immutable", StringComparison.OrdinalIgnoreCase) || string.Equals(normalized, "dry-run", StringComparison.OrdinalIgnoreCase))
if (string.Equals(normalized, "immutable", StringComparison.OrdinalIgnoreCase) ||
string.Equals(normalized, "dry-run", StringComparison.OrdinalIgnoreCase))
{
builder[normalized] = "true";
continue;
@@ -239,7 +333,7 @@ internal sealed record MigrationOptions
int? retentionSeconds = null;
if (builder.TryGetValue("retain-days", out var retainStr) && !string.IsNullOrWhiteSpace(retainStr))
{
if (double.TryParse(retainStr, out var days) && days > 0)
if (double.TryParse(retainStr, NumberStyles.Float, CultureInfo.InvariantCulture, out var days) && days > 0)
{
retentionSeconds = (int)Math.Ceiling(days * 24 * 60 * 60);
}
@@ -250,6 +344,10 @@ internal sealed record MigrationOptions
}
}
var retryAttempts = ParseIntOption(builder, "retry-attempts", 3, min: 1, max: 10);
var retryDelayMs = ParseIntOption(builder, "retry-delay-ms", 250, min: 50, max: 2000);
var timeoutSeconds = ParseIntOption(builder, "timeout-seconds", 0, min: 0, max: 3600);
return new MigrationOptions
{
S3Bucket = bucket,
@@ -265,6 +363,9 @@ internal sealed record MigrationOptions
Immutable = builder.ContainsKey("immutable"),
RetentionSeconds = retentionSeconds,
DryRun = builder.ContainsKey("dry-run"),
RetryAttempts = retryAttempts,
RetryDelayMs = retryDelayMs,
TimeoutSeconds = timeoutSeconds,
};
}
@@ -281,6 +382,29 @@ internal sealed record MigrationOptions
[--prefix scanner/] \
[--immutable] \
[--retain-days 365] \
[--retry-attempts 3] \
[--retry-delay-ms 250] \
[--timeout-seconds 0] \
[--dry-run]");
}
private static int ParseIntOption(Dictionary<string, string?> values, string name, int fallback, int min, int max)
{
if (!values.TryGetValue(name, out var raw) || string.IsNullOrWhiteSpace(raw))
{
return fallback;
}
if (!int.TryParse(raw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
return fallback;
}
if (parsed < min)
{
return min;
}
return parsed > max ? max : parsed;
}
}

View File

@@ -0,0 +1,16 @@
// Helper for composing RustFS object URIs from migration options and an S3 key.
internal static class RustFsMigratorPaths
{
    // Builds ".../api/v1/buckets/{bucket}/objects/{key}" against the configured
    // RustFS endpoint. Each path segment of the key is percent-escaped
    // individually; empty segments (duplicate slashes) are dropped.
    internal static Uri BuildRustFsUri(MigrationOptions options, string key)
    {
        var segments = key.Split('/', StringSplitOptions.RemoveEmptyEntries);
        for (var i = 0; i < segments.Length; i++)
        {
            segments[i] = Uri.EscapeDataString(segments[i]);
        }
        var escapedKey = string.Join('/', segments);
        var escapedBucket = Uri.EscapeDataString(options.RustFsBucket);
        var builder = new UriBuilder(options.RustFsEndpoint)
        {
            Path = $"/api/v1/buckets/{escapedBucket}/objects/{escapedKey}",
        };
        return builder.Uri;
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\FixtureUpdater\FixtureUpdater.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using StellaOps.Tools.FixtureUpdater;
using Xunit;
// Unit tests for FixtureUpdaterRunner: verifies deterministic fixture generation
// and error reporting when fixture inputs are malformed.
public sealed class FixtureUpdaterRunnerTests
{
    [Fact]
    public void Run_IsDeterministic_And_WritesGhsaFixtures()
    {
        // Arrange: copy the checked-in raw OSV/GHSA fixtures into a scratch layout
        // mirroring the updater's expected osv/ghsa/nvd directory structure.
        var repoRoot = FindRepoRoot();
        using var temp = new TempDirectory();
        var osvDir = Path.Combine(temp.Path, "osv");
        var ghsaDir = Path.Combine(temp.Path, "ghsa");
        var nvdDir = Path.Combine(temp.Path, "nvd");
        Directory.CreateDirectory(osvDir);
        Directory.CreateDirectory(ghsaDir);
        Directory.CreateDirectory(nvdDir);
        File.Copy(
            Path.Combine(repoRoot, "src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures", "osv-ghsa.raw-osv.json"),
            Path.Combine(osvDir, "osv-ghsa.raw-osv.json"));
        File.Copy(
            Path.Combine(repoRoot, "src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Ghsa.Tests", "Fixtures", "osv-ghsa.raw-ghsa.json"),
            Path.Combine(ghsaDir, "osv-ghsa.raw-ghsa.json"));
        // A fixed timestamp keeps generated content independent of wall-clock time.
        var options = new FixtureUpdaterOptions(
            RepoRoot: null,
            OsvFixturesPath: osvDir,
            GhsaFixturesPath: ghsaDir,
            NvdFixturesPath: nvdDir,
            FixedTime: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
        // Act: run the updater twice over identical inputs.
        var firstResult = new FixtureUpdaterRunner(options).Run();
        Assert.Equal(0, firstResult.ErrorCount);
        var firstOutputs = ReadOutputs(temp.Path);
        var secondResult = new FixtureUpdaterRunner(options).Run();
        Assert.Equal(0, secondResult.ErrorCount);
        var secondOutputs = ReadOutputs(temp.Path);
        // Assert: both runs must produce identical output files (determinism).
        Assert.Equal(firstOutputs.Count, secondOutputs.Count);
        foreach (var (path, content) in firstOutputs)
        {
            Assert.True(secondOutputs.TryGetValue(path, out var secondContent));
            Assert.Equal(content, secondContent);
        }
        // The derived GHSA fixture must land in the ghsa directory only,
        // never next to its OSV source.
        Assert.True(File.Exists(Path.Combine(ghsaDir, "osv-ghsa.ghsa.json")));
        Assert.False(File.Exists(Path.Combine(osvDir, "osv-ghsa.ghsa.json")));
    }

    [Fact]
    public void Run_Reports_ParseErrors_With_Context()
    {
        // Arrange: a valid OSV input alongside a deliberately broken GHSA JSON file.
        var repoRoot = FindRepoRoot();
        using var temp = new TempDirectory();
        var osvDir = Path.Combine(temp.Path, "osv");
        var ghsaDir = Path.Combine(temp.Path, "ghsa");
        var nvdDir = Path.Combine(temp.Path, "nvd");
        Directory.CreateDirectory(osvDir);
        Directory.CreateDirectory(ghsaDir);
        Directory.CreateDirectory(nvdDir);
        File.Copy(
            Path.Combine(repoRoot, "src", "Concelier", "__Tests", "StellaOps.Concelier.Connector.Osv.Tests", "Fixtures", "osv-ghsa.raw-osv.json"),
            Path.Combine(osvDir, "osv-ghsa.raw-osv.json"));
        File.WriteAllText(Path.Combine(ghsaDir, "osv-ghsa.raw-ghsa.json"), "{ broken json }");
        var errors = new List<string>();
        var options = new FixtureUpdaterOptions(
            RepoRoot: null,
            OsvFixturesPath: osvDir,
            GhsaFixturesPath: ghsaDir,
            NvdFixturesPath: nvdDir,
            FixedTime: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
        // Act: capture error messages via the injected error sink delegate.
        var result = new FixtureUpdaterRunner(options, _ => { }, message => errors.Add(message)).Run();
        // Assert: errors are counted and at least one names the offending file.
        Assert.True(result.ErrorCount > 0);
        Assert.Contains(errors, message => message.Contains("osv-ghsa.raw-ghsa.json", StringComparison.Ordinal));
    }

    // Reads every .json file under 'root' into a relative-path -> content map.
    // Line endings are normalized to "\n" so comparisons are OS-independent.
    private static Dictionary<string, string> ReadOutputs(string root)
    {
        var files = Directory.GetFiles(root, "*.json", SearchOption.AllDirectories)
            .OrderBy(path => path, StringComparer.Ordinal)
            .ToArray();
        var outputs = new Dictionary<string, string>(StringComparer.Ordinal);
        foreach (var file in files)
        {
            var relative = Path.GetRelativePath(root, file);
            var content = File.ReadAllText(file).ReplaceLineEndings("\n");
            outputs[relative] = content;
        }
        return outputs;
    }

    // Walks up from the test binary's directory until src/StellaOps.sln is found;
    // throws if the repository root cannot be located.
    private static string FindRepoRoot()
    {
        var current = new DirectoryInfo(AppContext.BaseDirectory);
        while (current is not null)
        {
            var solution = Path.Combine(current.FullName, "src", "StellaOps.sln");
            if (File.Exists(solution))
            {
                return current.FullName;
            }
            current = current.Parent;
        }
        throw new InvalidOperationException("Repository root not found.");
    }

    // Disposable scratch directory under the system temp path, deleted
    // recursively on dispose.
    private sealed class TempDirectory : IDisposable
    {
        public TempDirectory()
        {
            Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"fixture-updater-{Guid.NewGuid():N}");
            Directory.CreateDirectory(Path);
        }

        public string Path { get; }

        public void Dispose()
        {
            if (Directory.Exists(Path))
            {
                Directory.Delete(Path, recursive: true);
            }
        }
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\LanguageAnalyzerSmoke\LanguageAnalyzerSmoke.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,76 @@
using System;
using System.Collections.Generic;
using StellaOps.Tools.LanguageAnalyzerSmoke;
using Xunit;
// Unit tests for the language-analyzer smoke runner's option resolution,
// plugin-manifest validation, and golden-snapshot comparison helpers.
public sealed class LanguageAnalyzerSmokeRunnerTests
{
    [Fact]
    public void Resolve_UsesProfileDefaults_WhenOverridesMissing()
    {
        // When plugin-directory and fixture-path overrides are null, Resolve
        // must fall back to the catalog profile's defaults.
        var profile = AnalyzerProfileCatalog.GetProfile("python");
        var options = SmokeOptions.Resolve(
            repoRoot: "C:\\repo",
            analyzerId: "python",
            pluginDirectoryName: null,
            fixtureRelativePath: null,
            allowGoldenDrift: false,
            fixedTime: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
            useSystemTime: false,
            timeoutSeconds: 120);
        Assert.Equal(profile.PluginDirectory, options.PluginDirectoryName);
        Assert.Equal(profile.FixtureRelativePath, options.FixtureRelativePath);
        Assert.Equal(profile.AnalyzerId, options.AnalyzerId);
    }

    [Fact]
    public void ValidateManifest_RejectsMissingCapabilities()
    {
        // A manifest with an empty Capabilities array must be rejected, and the
        // error message must mention the missing capability.
        var profile = AnalyzerProfileCatalog.GetProfile("python");
        var manifest = new PluginManifest
        {
            SchemaVersion = "1.0",
            Id = profile.ExpectedPluginId,
            RequiresRestart = true,
            EntryPoint = new PluginEntryPoint
            {
                Type = "dotnet",
                TypeName = profile.ExpectedEntryPointType,
                Assembly = "Plugin.dll"
            },
            Capabilities = Array.Empty<string>()
        };
        var exception = Assert.Throws<InvalidOperationException>(() =>
            LanguageAnalyzerSmokeRunner.ValidateManifest(manifest, profile, profile.PluginDirectory));
        Assert.Contains("capability", exception.Message, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void CompareGoldenSnapshot_Throws_WhenDriftNotAllowed()
    {
        // Mismatching actual vs golden JSON must throw when drift is disallowed.
        Assert.Throws<InvalidOperationException>(() =>
            LanguageAnalyzerSmokeRunner.CompareGoldenSnapshot(
                scenarioName: "sample",
                actualJson: "{\"a\":1}",
                goldenNormalized: "{\"a\":2}",
                allowGoldenDrift: false,
                info: _ => { }));
    }

    [Fact]
    public void CompareGoldenSnapshot_AllowsWhenDriftAllowed()
    {
        // With drift allowed, the same mismatch must not throw; instead exactly
        // one warning mentioning the golden snapshot is emitted via 'info'.
        var warnings = new List<string>();
        LanguageAnalyzerSmokeRunner.CompareGoldenSnapshot(
            scenarioName: "sample",
            actualJson: "{\"a\":1}",
            goldenNormalized: "{\"a\":2}",
            allowGoldenDrift: true,
            info: message => warnings.Add(message));
        Assert.Single(warnings);
        Assert.Contains("golden", warnings[0], StringComparison.OrdinalIgnoreCase);
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project (IsTestProject=true) for the NotifySmokeCheck tool; not packed as a NuGet package. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\NotifySmokeCheck\NotifySmokeCheck.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using StackExchange.Redis;
using StellaOps.Tools.NotifySmokeCheck;
using Xunit;
/// <summary>
/// Tests for the notify smoke-check tool: environment-based option parsing,
/// delivery JSON parsing, and Redis stream-entry timestamp extraction.
/// </summary>
public sealed class NotifySmokeCheckRunnerTests
{
    /// <summary>Expected kinds are normalized: "scan, scan, Alert" yields ["scan", "alert"] per the assertion.</summary>
    [Fact]
    public void FromEnvironment_ParsesExpectedKinds()
    {
        var env = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            ["NOTIFY_SMOKE_REDIS_DSN"] = "localhost:6379",
            ["NOTIFY_SMOKE_EXPECT_KINDS"] = "scan, scan, Alert",
            ["NOTIFY_SMOKE_LOOKBACK_MINUTES"] = "15",
            ["NOTIFY_SMOKE_NOTIFY_BASEURL"] = "https://notify.local",
            ["NOTIFY_SMOKE_NOTIFY_TOKEN"] = "token",
            ["NOTIFY_SMOKE_NOTIFY_TENANT"] = "tenant"
        };

        var options = NotifySmokeOptions.FromEnvironment(name => env.TryGetValue(name, out var value) ? value : null);

        Assert.Equal(new[] { "scan", "alert" }, options.ExpectedKinds);
    }

    /// <summary>NOTIFY_SMOKE_FIXED_TIME pins the options' TimeProvider to the given UTC instant.</summary>
    [Fact]
    public void FromEnvironment_UsesFixedTimeWhenProvided()
    {
        var env = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            ["NOTIFY_SMOKE_REDIS_DSN"] = "localhost:6379",
            ["NOTIFY_SMOKE_EXPECT_KINDS"] = "scan",
            ["NOTIFY_SMOKE_LOOKBACK_MINUTES"] = "5",
            ["NOTIFY_SMOKE_NOTIFY_BASEURL"] = "https://notify.local",
            ["NOTIFY_SMOKE_NOTIFY_TOKEN"] = "token",
            ["NOTIFY_SMOKE_NOTIFY_TENANT"] = "tenant",
            ["NOTIFY_SMOKE_FIXED_TIME"] = "2025-01-02T03:04:05Z"
        };

        var options = NotifySmokeOptions.FromEnvironment(name => env.TryGetValue(name, out var value) ? value : null);

        Assert.Equal(new DateTimeOffset(2025, 1, 2, 3, 4, 5, TimeSpan.Zero), options.TimeProvider.GetUtcNow());
    }

    /// <summary>Deliveries are read from the "items" array of the notify API payload.</summary>
    [Fact]
    public void ParseDeliveries_HandlesItemsArray()
    {
        var json = "{\"items\":[{\"kind\":\"scan\",\"status\":\"delivered\"},{\"kind\":\"vex\",\"status\":\"failed\"}]}";

        var deliveries = NotifySmokeCheckRunner.ParseDeliveries(json);

        Assert.Equal(2, deliveries.Count);
        Assert.Equal("scan", deliveries[0].Kind, StringComparer.OrdinalIgnoreCase);
        Assert.Equal("delivered", deliveries[0].Status, StringComparer.OrdinalIgnoreCase);
    }

    /// <summary>Redis stream entry ids of the form "1700000000000-0" parse into a unix-millisecond timestamp.</summary>
    [Fact]
    public void TryGetStreamTimestamp_ParsesEntryId()
    {
        var entry = new StreamEntry("1700000000000-0", Array.Empty<NameValueEntry>());

        var success = NotifySmokeCheckRunner.TryGetStreamTimestamp(entry, out var timestamp);

        Assert.True(success);
        Assert.Equal(DateTimeOffset.FromUnixTimeMilliseconds(1700000000000), timestamp);
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project (IsTestProject=true) for the PolicyDslValidator tool; not packed as a NuGet package. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\PolicyDslValidator\PolicyDslValidator.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,44 @@
using StellaOps.Policy;
using StellaOps.Policy.Tools;
/// <summary>Tests for the policy DSL validator CLI wiring (argument parsing and runner dispatch).</summary>
public sealed class PolicyDslValidatorAppTests
{
    /// <summary>Missing inputs produce exit code 64 (EX_USAGE) without ever invoking the runner.</summary>
    [Fact]
    public async Task RunAsync_ReturnsUsageExitCode_OnMissingInputs()
    {
        var runner = new CapturingRunner();

        var exitCode = await PolicyDslValidatorApp.RunAsync(Array.Empty<string>(), runner);

        Assert.Equal(64, exitCode);
        Assert.False(runner.WasCalled);
    }

    /// <summary>--strict and --json flags and the positional input are forwarded to the runner's options.</summary>
    [Fact]
    public async Task Command_CapturesStrictAndJson()
    {
        var runner = new CapturingRunner();

        var exitCode = await PolicyDslValidatorApp.RunAsync(new[] { "--strict", "--json", "policy.json" }, runner);

        Assert.Equal(0, exitCode);
        Assert.True(runner.WasCalled);
        Assert.NotNull(runner.CapturedOptions);
        Assert.True(runner.CapturedOptions!.Strict);
        Assert.True(runner.CapturedOptions!.OutputJson);
        Assert.Single(runner.CapturedOptions!.Inputs);
        Assert.Equal("policy.json", runner.CapturedOptions!.Inputs[0]);
    }

    /// <summary>Stub runner that records the options it was invoked with and returns success.</summary>
    private sealed class CapturingRunner : IPolicyValidationRunner
    {
        // Options captured from the last RunAsync call; null until invoked.
        public PolicyValidationCliOptions? CapturedOptions { get; private set; }

        // True once RunAsync has been called at least once.
        public bool WasCalled { get; private set; }

        public Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken)
        {
            CapturedOptions = options;
            WasCalled = true;
            return Task.FromResult(0);
        }
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project (IsTestProject=true) for the PolicySchemaExporter tool; not packed as a NuGet package. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\PolicySchemaExporter\PolicySchemaExporter.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,46 @@
using System.IO;
using StellaOps.Policy.Tools;
/// <summary>Tests for schema generation determinism and output-path resolution of the schema exporter.</summary>
public sealed class PolicySchemaExporterTests
{
    /// <summary>Two generation passes over the same exports produce byte-identical JSON for every file name.</summary>
    [Fact]
    public void GenerateSchemas_IsStableAndHasExpectedNames()
    {
        var exports = PolicySchemaExporterSchema.BuildExports();

        var first = PolicySchemaExporterSchema.GenerateSchemas(
            PolicySchemaExporterSchema.CreateGenerator(),
            exports);
        var second = PolicySchemaExporterSchema.GenerateSchemas(
            PolicySchemaExporterSchema.CreateGenerator(),
            exports);

        Assert.Equal(exports.Length, first.Count);
        foreach (var export in exports)
        {
            Assert.True(first.ContainsKey(export.FileName));
            Assert.True(second.ContainsKey(export.FileName));
            Assert.Equal(first[export.FileName], second[export.FileName]);
        }
    }

    /// <summary>A relative --output is anchored at the repo root.</summary>
    [Fact]
    public void ResolveOutputDirectory_UsesRepoRootForRelativeOutput()
    {
        var repoRoot = Path.Combine(Path.GetTempPath(), "schema-exporter");

        var resolved = PolicySchemaExporterPaths.ResolveOutputDirectory("out", repoRoot);

        var expected = Path.GetFullPath(Path.Combine(repoRoot, "out"));
        Assert.Equal(expected, resolved);
    }

    /// <summary>The default output directory is &lt;repoRoot&gt;/docs/schemas.</summary>
    [Fact]
    public void ResolveDefaultOutputDirectory_UsesRepoRootDocsSchemas()
    {
        var repoRoot = Path.Combine(Path.GetTempPath(), "schema-exporter-root");

        var resolved = PolicySchemaExporterPaths.ResolveDefaultOutputDirectory(repoRoot);

        var expected = Path.GetFullPath(Path.Combine(repoRoot, "docs", "schemas"));
        Assert.Equal(expected, resolved);
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project (IsTestProject=true) for the PolicySimulationSmoke tool; not packed as a NuGet package. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\PolicySimulationSmoke\PolicySimulationSmoke.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,67 @@
using System.Collections.Immutable;
using StellaOps.Policy;
using StellaOps.Policy.Tools;
/// <summary>Tests for the simulation smoke evaluator and the smoke runner's input validation.</summary>
public sealed class PolicySimulationSmokeEvaluatorTests
{
    /// <summary>An unsuccessful preview response fails the scenario with a "Preview failed." entry.</summary>
    [Fact]
    public void EvaluateScenario_FailsWhenPreviewFails()
    {
        var scenario = new PolicySimulationScenario { Name = "demo" };
        var response = new PolicyPreviewResponse(
            Success: false,
            PolicyDigest: "digest",
            RevisionId: null,
            Issues: ImmutableArray<PolicyIssue>.Empty,
            Diffs: ImmutableArray<PolicyVerdictDiff>.Empty,
            ChangedCount: 0);

        var result = PolicySimulationSmokeEvaluator.EvaluateScenario(scenario, response);

        Assert.False(result.Success);
        Assert.Contains("Preview failed.", result.Failures);
    }

    /// <summary>A scenario expecting a finding that the diff does not contain fails with a "missing" message.</summary>
    [Fact]
    public void EvaluateScenario_FailsWhenExpectedDiffMissing()
    {
        var scenario = new PolicySimulationScenario
        {
            Name = "demo",
            ExpectedDiffs = new List<ScenarioExpectedDiff>
            {
                new ScenarioExpectedDiff { FindingId = "F-1", Status = "Blocked" }
            }
        };
        // The diff only covers F-2, so the expected F-1 is absent.
        var baseline = new PolicyVerdict("F-2", PolicyVerdictStatus.Pass);
        var projected = new PolicyVerdict("F-2", PolicyVerdictStatus.Pass);
        var diff = new PolicyVerdictDiff(baseline, projected);
        var response = new PolicyPreviewResponse(
            Success: true,
            PolicyDigest: "digest",
            RevisionId: null,
            Issues: ImmutableArray<PolicyIssue>.Empty,
            Diffs: ImmutableArray.Create(diff),
            ChangedCount: 1);

        var result = PolicySimulationSmokeEvaluator.EvaluateScenario(scenario, response);

        Assert.False(result.Success);
        Assert.Contains("Expected finding 'F-1' missing from diff.", result.Failures);
    }

    /// <summary>A non-existent scenario root yields exit code 66 (EX_NOINPUT).</summary>
    [Fact]
    public async Task RunAsync_ReturnsNoInputWhenScenarioRootMissing()
    {
        var runner = new PolicySimulationSmokeRunner();
        var missingRoot = Path.Combine(Path.GetTempPath(), "stellaops-missing-" + Guid.NewGuid().ToString("N"));

        var exitCode = await runner.RunAsync(
            new PolicySimulationSmokeOptions { ScenarioRoot = missingRoot },
            CancellationToken.None);

        Assert.Equal(66, exitCode);
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project (IsTestProject=true) for the RustFsMigrator tool; not packed as a NuGet package. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\RustFsMigrator\RustFsMigrator.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,42 @@
using System;
using Xunit;
/// <summary>Tests for the RustFS migrator's CLI option parsing and target-URI construction.</summary>
public sealed class RustFsMigratorTests
{
    /// <summary>Retry/timeout/retention flags are parsed into their typed option values.</summary>
    [Fact]
    public void Parse_ExtractsRetryAndTimeoutOptions()
    {
        var options = MigrationOptions.Parse(new[]
        {
            "--s3-bucket", "bucket",
            "--rustfs-endpoint", "http://rustfs:8080",
            "--rustfs-bucket", "target",
            "--retry-attempts", "5",
            "--retry-delay-ms", "500",
            "--timeout-seconds", "60",
            "--retain-days", "1.5"
        });

        Assert.NotNull(options);
        Assert.Equal(5, options!.RetryAttempts);
        Assert.Equal(500, options.RetryDelayMs);
        Assert.Equal(60, options.TimeoutSeconds);
        // --retain-days 1.5 is converted to a positive seconds value.
        Assert.NotNull(options.RetentionSeconds);
        Assert.True(options.RetentionSeconds > 0);
    }

    /// <summary>Spaces in bucket and object key are percent-encoded in the resulting URI.</summary>
    [Fact]
    public void BuildRustFsUri_EncodesObjectKey()
    {
        var options = new MigrationOptions
        {
            RustFsEndpoint = "https://rustfs.local",
            RustFsBucket = "scanner artifacts"
        };

        var uri = RustFsMigratorPaths.BuildRustFsUri(options, "path/with space/file.txt");

        Assert.Equal("https", uri.Scheme);
        Assert.Contains("scanner%20artifacts", uri.AbsoluteUri, StringComparison.Ordinal);
        Assert.Contains("path/with%20space/file.txt", uri.AbsoluteUri, StringComparison.Ordinal);
    }
}

View File

@@ -0,0 +1,33 @@
using System.CommandLine;
using StellaOps.Policy;
namespace StellaOps.Policy.Tools;
/// <summary>Console entry point for the policy DSL validator.</summary>
public static class PolicyDslValidatorApp
{
    private const int UsageExitCode = 64; // EX_USAGE

    /// <summary>Runs the validator with the default production runner.</summary>
    public static Task<int> RunAsync(string[] args)
        => RunAsync(args, new PolicyValidationRunner(new PolicyValidationCli()));

    /// <summary>Parses <paramref name="args"/> and dispatches to <paramref name="runner"/>.</summary>
    /// <returns>64 on parse errors; otherwise the runner's exit code.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="runner"/> is null.</exception>
    public static async Task<int> RunAsync(string[] args, IPolicyValidationRunner runner)
    {
        ArgumentNullException.ThrowIfNull(runner);

        var rootCommand = PolicyDslValidatorCommand.Build(runner);
        var parsed = rootCommand.Parse(args, new ParserConfiguration());
        var invocation = new InvocationConfiguration();

        // Invoke even on parse errors so System.CommandLine prints its diagnostics,
        // but map that case to the conventional usage exit code.
        var invocationResult = await parsed.InvokeAsync(invocation, CancellationToken.None);
        return parsed.Errors.Count > 0 ? UsageExitCode : invocationResult;
    }
}

View File

@@ -0,0 +1,57 @@
using System.CommandLine;
using StellaOps.Policy;
namespace StellaOps.Policy.Tools;
/// <summary>Builds the System.CommandLine command tree for the policy DSL validator.</summary>
public static class PolicyDslValidatorCommand
{
    /// <summary>Creates the standalone root command for the validator executable.</summary>
    public static RootCommand Build(IPolicyValidationRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var root = new RootCommand("Validate StellaOps policy DSL files.");
        Configure(root, runner, cancellationTokenOverride);
        return root;
    }

    /// <summary>Creates the same behavior as a "policy-dsl-validate" subcommand for embedding in a larger CLI.</summary>
    public static Command BuildCommand(IPolicyValidationRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var command = new Command("policy-dsl-validate", "Validate StellaOps policy DSL files.");
        Configure(command, runner, cancellationTokenOverride);
        return command;
    }

    // Registers the shared argument/options and the action that forwards parsed values to the runner.
    private static void Configure(Command command, IPolicyValidationRunner runner, CancellationToken? cancellationTokenOverride)
    {
        // One or more files/directories/globs to validate.
        var inputs = new Argument<List<string>>("inputs")
        {
            Description = "Policy files, directories, or globs to validate.",
            Arity = ArgumentArity.OneOrMore
        };
        var strict = new Option<bool>("--strict", new[] { "-s" })
        {
            Description = "Treat warnings as errors."
        };
        var outputJson = new Option<bool>("--json", new[] { "-j" })
        {
            Description = "Emit machine-readable JSON output."
        };
        command.Add(inputs);
        command.Add(strict);
        command.Add(outputJson);
        command.SetAction(async (parseResult, cancellationToken) =>
        {
            var options = new PolicyValidationCliOptions
            {
                Inputs = parseResult.GetValue(inputs) ?? new List<string>(),
                Strict = parseResult.GetValue(strict),
                OutputJson = parseResult.GetValue(outputJson),
            };
            // Tests/hosts may pin a token; otherwise flow the invocation's token.
            var effectiveCancellationToken = cancellationTokenOverride ?? cancellationToken;
            return await runner.RunAsync(options, effectiveCancellationToken);
        });
    }
}

View File

@@ -0,0 +1,22 @@
using System.CommandLine;
namespace StellaOps.Policy.Tools;
/// <summary>Console entry point for the policy schema exporter.</summary>
public static class PolicySchemaExporterApp
{
    private const int UsageExitCode = 64; // EX_USAGE

    /// <summary>Parses <paramref name="args"/> and executes the exporter command.</summary>
    /// <returns>64 on parse errors; otherwise the command's exit code.</returns>
    public static async Task<int> RunAsync(string[] args)
    {
        var exporterRunner = new PolicySchemaExporterRunner();
        var rootCommand = PolicySchemaExporterCommand.Build(exporterRunner);
        var parsed = rootCommand.Parse(args, new ParserConfiguration());
        var invocation = new InvocationConfiguration();

        // Invoke even on parse errors so System.CommandLine prints its diagnostics,
        // but map that case to the conventional usage exit code.
        var invocationResult = await parsed.InvokeAsync(invocation, CancellationToken.None);
        return parsed.Errors.Count > 0 ? UsageExitCode : invocationResult;
    }
}

View File

@@ -0,0 +1,48 @@
using System.CommandLine;
namespace StellaOps.Policy.Tools;
/// <summary>Builds the System.CommandLine command tree for the policy schema exporter.</summary>
public static class PolicySchemaExporterCommand
{
    /// <summary>Creates the standalone root command for the exporter executable.</summary>
    public static RootCommand Build(PolicySchemaExporterRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var root = new RootCommand("Export policy schema JSON files.");
        Configure(root, runner, cancellationTokenOverride);
        return root;
    }

    /// <summary>Creates the same behavior as a "policy-schema-export" subcommand for embedding in a larger CLI.</summary>
    public static Command BuildCommand(PolicySchemaExporterRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var command = new Command("policy-schema-export", "Export policy schema JSON files.");
        Configure(command, runner, cancellationTokenOverride);
        return command;
    }

    // Registers the shared options and the action that forwards parsed values to the runner.
    private static void Configure(Command command, PolicySchemaExporterRunner runner, CancellationToken? cancellationTokenOverride)
    {
        var output = new Option<string?>("--output", new[] { "-o" })
        {
            Description = "Output directory for schema files."
        };
        var repoRoot = new Option<string?>("--repo-root", new[] { "-r" })
        {
            Description = "Repository root used to resolve default output path."
        };
        command.Add(output);
        command.Add(repoRoot);
        command.SetAction((parseResult, cancellationToken) =>
        {
            var options = new PolicySchemaExportOptions
            {
                OutputDirectory = parseResult.GetValue(output),
                RepoRoot = parseResult.GetValue(repoRoot),
            };
            // Tests/hosts may pin a token; otherwise flow the invocation's token.
            var effectiveCancellationToken = cancellationTokenOverride ?? cancellationToken;
            return runner.RunAsync(options, effectiveCancellationToken);
        });
    }
}

View File

@@ -0,0 +1,190 @@
using System.Collections.Immutable;
using NJsonSchema;
using NJsonSchema.Generation;
using Newtonsoft.Json;
using StellaOps.Scheduler.Models;
namespace StellaOps.Policy.Tools;
/// <summary>Options controlling a schema export run.</summary>
public sealed record PolicySchemaExportOptions
{
    /// <summary>Target directory for generated schema files; null to derive the default from the repo root.</summary>
    public string? OutputDirectory { get; init; }

    /// <summary>Repository root used to resolve relative/default output paths; null to auto-detect.</summary>
    public string? RepoRoot { get; init; }
}

/// <summary>Pairs an output file name with the CLR type whose JSON schema is exported.</summary>
public sealed record SchemaExportDefinition(string FileName, Type Type);
/// <summary>Generates JSON schema files for the policy DTOs and writes them to disk.</summary>
public sealed class PolicySchemaExporterRunner
{
    /// <summary>
    /// Resolves the output directory, generates all schemas, and writes one file per export.
    /// </summary>
    /// <returns>
    /// 0 on success; 64 (EX_USAGE) when no repo root/output can be resolved;
    /// 73 (EX_CANTCREAT) when the output directory cannot be created.
    /// </returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
    public async Task<int> RunAsync(PolicySchemaExportOptions options, CancellationToken cancellationToken)
    {
        if (options is null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        // Explicit --repo-root wins; otherwise probe upward from CWD, then from the app base directory.
        var repoRoot = NormalizePath(options.RepoRoot)
            ?? PolicySchemaExporterPaths.TryFindRepoRoot(Directory.GetCurrentDirectory())
            ?? PolicySchemaExporterPaths.TryFindRepoRoot(AppContext.BaseDirectory);

        string? outputDirectory;
        if (!string.IsNullOrWhiteSpace(options.OutputDirectory))
        {
            outputDirectory = PolicySchemaExporterPaths.ResolveOutputDirectory(options.OutputDirectory!, repoRoot);
        }
        else if (!string.IsNullOrWhiteSpace(repoRoot))
        {
            outputDirectory = PolicySchemaExporterPaths.ResolveDefaultOutputDirectory(repoRoot);
        }
        else
        {
            Console.Error.WriteLine("Unable to resolve repo root. Provide --output or --repo-root.");
            return 64; // EX_USAGE
        }

        if (!TryEnsureOutputDirectory(outputDirectory, out var error))
        {
            Console.Error.WriteLine(error);
            return 73; // EX_CANTCREAT
        }

        var generator = PolicySchemaExporterSchema.CreateGenerator();
        var exports = PolicySchemaExporterSchema.BuildExports();
        var schemas = PolicySchemaExporterSchema.GenerateSchemas(generator, exports);

        foreach (var export in exports)
        {
            if (!schemas.TryGetValue(export.FileName, out var json))
            {
                continue;
            }
            var outputPath = Path.Combine(outputDirectory, export.FileName);
            // Trailing newline keeps the generated files POSIX-friendly.
            await File.WriteAllTextAsync(outputPath, json + Environment.NewLine, cancellationToken);
            Console.WriteLine($"Wrote {outputPath}");
        }

        return 0;
    }

    // Returns the full path for a non-blank input, null otherwise.
    private static string? NormalizePath(string? path)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            return null;
        }
        return Path.GetFullPath(path);
    }

    // Creates the output directory; fails (with a message) if the path exists as a file
    // or directory creation throws.
    private static bool TryEnsureOutputDirectory(string outputDirectory, out string? error)
    {
        error = null;
        try
        {
            if (File.Exists(outputDirectory))
            {
                error = $"Output path '{outputDirectory}' is a file, expected a directory.";
                return false;
            }
            Directory.CreateDirectory(outputDirectory);
            return true;
        }
        catch (Exception ex)
        {
            error = $"Failed to create output directory '{outputDirectory}': {ex.Message}";
            return false;
        }
    }
}
/// <summary>Schema generation: the fixed export list and the NJsonSchema generator configuration.</summary>
public static class PolicySchemaExporterSchema
{
    /// <summary>The set of exported schema files and the scheduler-model types they are generated from.</summary>
    public static ImmutableArray<SchemaExportDefinition> BuildExports()
        => ImmutableArray.Create(
            new SchemaExportDefinition("policy-run-request.schema.json", typeof(PolicyRunRequest)),
            new SchemaExportDefinition("policy-run-status.schema.json", typeof(PolicyRunStatus)),
            new SchemaExportDefinition("policy-diff-summary.schema.json", typeof(PolicyDiffSummary)),
            new SchemaExportDefinition("policy-explain-trace.schema.json", typeof(PolicyExplainTrace))
        );

    /// <summary>
    /// Creates a generator using Newtonsoft contract resolution: camelCase property names,
    /// nulls ignored, reference types not-null by default.
    /// </summary>
    public static JsonSchemaGenerator CreateGenerator()
    {
        var generatorSettings = new NJsonSchema.NewtonsoftJson.Generation.NewtonsoftJsonSchemaGeneratorSettings
        {
            SchemaType = SchemaType.JsonSchema,
            DefaultReferenceTypeNullHandling = ReferenceTypeNullHandling.NotNull,
            SerializerSettings = new JsonSerializerSettings
            {
                ContractResolver = new Newtonsoft.Json.Serialization.CamelCasePropertyNamesContractResolver(),
                NullValueHandling = NullValueHandling.Ignore,
            },
        };
        return new JsonSchemaGenerator(generatorSettings);
    }

    /// <summary>
    /// Generates indented JSON (keyed by file name, case-insensitive) for every export;
    /// each schema is titled with the type name and forbids additional properties.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    public static IReadOnlyDictionary<string, string> GenerateSchemas(JsonSchemaGenerator generator, IEnumerable<SchemaExportDefinition> exports)
    {
        if (generator is null)
        {
            throw new ArgumentNullException(nameof(generator));
        }
        if (exports is null)
        {
            throw new ArgumentNullException(nameof(exports));
        }

        var results = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        foreach (var export in exports)
        {
            var schema = generator.Generate(export.Type);
            schema.Title = export.Type.Name;
            schema.AllowAdditionalProperties = false;
            results[export.FileName] = schema.ToJson(Formatting.Indented);
        }
        return results;
    }
}
/// <summary>Path-resolution helpers for the schema exporter.</summary>
public static class PolicySchemaExporterPaths
{
    /// <summary>
    /// Walks upward from <paramref name="startDirectory"/> looking for the ancestor that
    /// contains <c>src/Directory.Build.props</c>; returns null when none matches or the
    /// input is blank.
    /// </summary>
    public static string? TryFindRepoRoot(string startDirectory)
    {
        if (string.IsNullOrWhiteSpace(startDirectory))
        {
            return null;
        }

        for (var directory = new DirectoryInfo(Path.GetFullPath(startDirectory));
             directory is not null;
             directory = directory.Parent)
        {
            var marker = Path.Combine(directory.FullName, "src", "Directory.Build.props");
            if (File.Exists(marker))
            {
                return directory.FullName;
            }
        }

        return null;
    }

    /// <summary>Default output location: <c>&lt;repoRoot&gt;/docs/schemas</c>, fully qualified.</summary>
    public static string ResolveDefaultOutputDirectory(string repoRoot)
    {
        var defaultDirectory = Path.Combine(repoRoot, "docs", "schemas");
        return Path.GetFullPath(defaultDirectory);
    }

    /// <summary>
    /// Resolves <paramref name="outputPath"/>: absolute paths are normalized as-is;
    /// relative paths are anchored at <paramref name="repoRoot"/>, falling back to the
    /// current directory when the repo root is unknown.
    /// </summary>
    public static string ResolveOutputDirectory(string outputPath, string? repoRoot)
    {
        if (Path.IsPathRooted(outputPath))
        {
            return Path.GetFullPath(outputPath);
        }

        var anchor = string.IsNullOrWhiteSpace(repoRoot) ? Directory.GetCurrentDirectory() : repoRoot;
        return Path.GetFullPath(Path.Combine(anchor, outputPath));
    }
}

View File

@@ -0,0 +1,22 @@
using System.CommandLine;
namespace StellaOps.Policy.Tools;
/// <summary>Console entry point for the policy simulation smoke tool.</summary>
public static class PolicySimulationSmokeApp
{
    private const int UsageExitCode = 64; // EX_USAGE

    /// <summary>Parses <paramref name="args"/> and executes the smoke-run command.</summary>
    /// <returns>64 on parse errors; otherwise the command's exit code.</returns>
    public static async Task<int> RunAsync(string[] args)
    {
        var smokeRunner = new PolicySimulationSmokeRunner();
        var rootCommand = PolicySimulationSmokeCommand.Build(smokeRunner);
        var parsed = rootCommand.Parse(args, new ParserConfiguration());
        var invocation = new InvocationConfiguration();

        // Invoke even on parse errors so System.CommandLine prints its diagnostics,
        // but map that case to the conventional usage exit code.
        var invocationResult = await parsed.InvokeAsync(invocation, CancellationToken.None);
        return parsed.Errors.Count > 0 ? UsageExitCode : invocationResult;
    }
}

View File

@@ -0,0 +1,75 @@
using System.CommandLine;
namespace StellaOps.Policy.Tools;
/// <summary>Builds the System.CommandLine command tree for the policy simulation smoke tool.</summary>
public static class PolicySimulationSmokeCommand
{
    /// <summary>Creates the standalone root command for the smoke executable.</summary>
    public static RootCommand Build(PolicySimulationSmokeRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var root = new RootCommand("Run policy simulation smoke scenarios.");
        Configure(root, runner, cancellationTokenOverride);
        return root;
    }

    /// <summary>Creates the same behavior as a "policy-simulation-smoke" subcommand for embedding in a larger CLI.</summary>
    public static Command BuildCommand(PolicySimulationSmokeRunner runner, CancellationToken? cancellationTokenOverride = null)
    {
        var command = new Command("policy-simulation-smoke", "Run policy simulation smoke scenarios.");
        Configure(command, runner, cancellationTokenOverride);
        return command;
    }

    // Registers the shared options and the action that validates --fixed-time and forwards to the runner.
    private static void Configure(Command command, PolicySimulationSmokeRunner runner, CancellationToken? cancellationTokenOverride)
    {
        var scenarioRoot = new Option<string>("--scenario-root", new[] { "-r" })
        {
            Description = "Path to the policy simulation scenarios."
        };
        var output = new Option<string?>("--output", new[] { "-o" })
        {
            Description = "Directory for summary output."
        };
        var repoRoot = new Option<string?>("--repo-root", Array.Empty<string>())
        {
            Description = "Repository root for resolving relative paths."
        };
        var fixedTime = new Option<string?>("--fixed-time", Array.Empty<string>())
        {
            Description = "Fixed ISO-8601 timestamp for deterministic runs."
        };
        command.Add(scenarioRoot);
        command.Add(output);
        command.Add(repoRoot);
        command.Add(fixedTime);
        command.SetAction(async (parseResult, cancellationToken) =>
        {
            // --fixed-time is validated here so a bad value fails fast with EX_USAGE
            // before any scenario work starts.
            var fixedTimeValue = parseResult.GetValue(fixedTime);
            DateTimeOffset? fixedTimeParsed = null;
            if (!string.IsNullOrWhiteSpace(fixedTimeValue))
            {
                if (!PolicySimulationSmokeParsing.TryParseFixedTime(fixedTimeValue!, out var parsed))
                {
                    Console.Error.WriteLine("Invalid --fixed-time value. Use ISO-8601 (e.g., 2025-01-02T03:04:05Z).");
                    return 64; // EX_USAGE
                }
                fixedTimeParsed = parsed;
            }
            var options = new PolicySimulationSmokeOptions
            {
                // Default scenario location when --scenario-root is omitted.
                ScenarioRoot = parseResult.GetValue(scenarioRoot) ?? "samples/policy/simulations",
                OutputDirectory = parseResult.GetValue(output),
                RepoRoot = parseResult.GetValue(repoRoot),
                FixedTime = fixedTimeParsed,
            };
            // Tests/hosts may pin a token; otherwise flow the invocation's token.
            var effectiveCancellationToken = cancellationTokenOverride ?? cancellationToken;
            return await runner.RunAsync(options, effectiveCancellationToken);
        });
    }
}

View File

@@ -0,0 +1,74 @@
using StellaOps.Policy;
namespace StellaOps.Policy.Tools;
/// <summary>One simulation scenario as deserialized from a scenario.json file.</summary>
public sealed record PolicySimulationScenario
{
    /// <summary>Display name used in logs and results.</summary>
    public string Name { get; init; } = "scenario";

    /// <summary>Path to the policy file exercised by the scenario (absolute or repo-relative).</summary>
    public string PolicyPath { get; init; } = string.Empty;

    /// <summary>Findings fed into the policy preview.</summary>
    public List<ScenarioFinding> Findings { get; init; } = new();

    /// <summary>Expected per-finding statuses the preview diff must contain.</summary>
    public List<ScenarioExpectedDiff> ExpectedDiffs { get; init; } = new();

    /// <summary>Optional baseline verdicts; null when the preview starts from an empty baseline.</summary>
    public List<ScenarioBaseline>? Baseline { get; init; }
}

/// <summary>A single finding supplied to the preview; optional fields stay null when unused.</summary>
public sealed record ScenarioFinding
{
    public string FindingId { get; init; } = string.Empty;
    // Parsed into PolicySeverity by the runner (case-insensitive).
    public string Severity { get; init; } = "Low";
    public string? Environment { get; init; }
    public string? Source { get; init; }
    public string? Vendor { get; init; }
    public string? License { get; init; }
    public string? Image { get; init; }
    public string? Repository { get; init; }
    public string? Package { get; init; }
    public string? Purl { get; init; }
    public string? Cve { get; init; }
    public string? Path { get; init; }
    public string? LayerDigest { get; init; }
    public string[]? Tags { get; init; }
}

/// <summary>Expected projected status for one finding in the preview diff.</summary>
public sealed record ScenarioExpectedDiff
{
    public string FindingId { get; init; } = string.Empty;
    // Parsed into PolicyVerdictStatus by the evaluator (case-insensitive).
    public string Status { get; init; } = "Pass";
}

/// <summary>Baseline verdict for one finding, converted to a PolicyVerdict by the runner.</summary>
public sealed record ScenarioBaseline
{
    public string FindingId { get; init; } = string.Empty;
    public string Status { get; init; } = "Pass";
    public string? RuleName { get; init; }
    public string? RuleAction { get; init; }
    public string? Notes { get; init; }
    public double Score { get; init; }
    // Null falls back to PolicyScoringConfig.Default.Version in the runner.
    public string? ConfigVersion { get; init; }
    public Dictionary<string, double>? Inputs { get; init; }
}

/// <summary>Outcome of evaluating one scenario against a preview response.</summary>
public sealed record ScenarioResult(string ScenarioName)
{
    /// <summary>False when any expectation failed; defaults to true.</summary>
    public bool Success { get; init; } = true;

    /// <summary>Changed-finding count reported by the preview.</summary>
    public int ChangedCount { get; init; }

    /// <summary>Human-readable failure messages accumulated during evaluation.</summary>
    public List<string> Failures { get; } = new();

    /// <summary>Projected status per finding id (case-insensitive keys).</summary>
    public Dictionary<string, string> ActualStatuses { get; } = new(StringComparer.OrdinalIgnoreCase);
}
/// <summary>No-op snapshot repository so previews can run without any persistence backend.</summary>
public sealed class NullPolicySnapshotRepository : IPolicySnapshotRepository
{
    public Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default) => Task.CompletedTask;
    public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default) => Task.FromResult<PolicySnapshot?>(null);
    public Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
        => Task.FromResult<IReadOnlyList<PolicySnapshot>>(Array.Empty<PolicySnapshot>());
}

/// <summary>No-op audit repository; audit entries are accepted and discarded.</summary>
public sealed class NullPolicyAuditRepository : IPolicyAuditRepository
{
    public Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default) => Task.CompletedTask;
    public Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
        => Task.FromResult<IReadOnlyList<PolicyAuditEntry>>(Array.Empty<PolicyAuditEntry>());
}

View File

@@ -0,0 +1,338 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Policy;
namespace StellaOps.Policy.Tools;
/// <summary>Options for a simulation smoke run.</summary>
public sealed record PolicySimulationSmokeOptions
{
    /// <summary>Directory scanned recursively for scenario.json files.</summary>
    public string ScenarioRoot { get; init; } = "samples/policy/simulations";

    /// <summary>Optional directory for the JSON summary; null skips summary output.</summary>
    public string? OutputDirectory { get; init; }

    /// <summary>Repository root used to resolve relative paths; null to auto-detect.</summary>
    public string? RepoRoot { get; init; }

    /// <summary>Fixed timestamp for deterministic runs; null uses the system clock.</summary>
    public DateTimeOffset? FixedTime { get; init; }
}
/// <summary>
/// Discovers scenario.json files, runs each through the policy preview service,
/// evaluates expectations, and optionally writes a JSON summary.
/// </summary>
public sealed class PolicySimulationSmokeRunner
{
    private readonly ILoggerFactory _loggerFactory;

    /// <summary>Uses a null logger factory when none is supplied.</summary>
    public PolicySimulationSmokeRunner(ILoggerFactory? loggerFactory = null)
    {
        _loggerFactory = loggerFactory ?? NullLoggerFactory.Instance;
    }

    /// <summary>
    /// Runs all scenarios under the resolved scenario root.
    /// </summary>
    /// <returns>
    /// 0 when every scenario passes (also when no scenario files exist);
    /// 1 when any scenario fails; 64 (EX_USAGE) on unresolvable relative paths;
    /// 66 (EX_NOINPUT) when the scenario root does not exist.
    /// </returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is null.</exception>
    public async Task<int> RunAsync(PolicySimulationSmokeOptions options, CancellationToken cancellationToken)
    {
        if (options is null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        var repoRoot = PolicySimulationSmokePaths.ResolveRepoRoot(options.RepoRoot);
        var scenarioRoot = PolicySimulationSmokePaths.ResolveScenarioRoot(options.ScenarioRoot, repoRoot);
        if (scenarioRoot is null)
        {
            Console.Error.WriteLine("Scenario root is relative; provide --repo-root or use an absolute path.");
            return 64; // EX_USAGE
        }
        if (!Directory.Exists(scenarioRoot))
        {
            Console.Error.WriteLine($"Scenario root '{scenarioRoot}' does not exist.");
            return 66; // EX_NOINPUT
        }

        // Deterministic ordering so runs and summaries are reproducible.
        var scenarioFiles = Directory.GetFiles(scenarioRoot, "scenario.json", SearchOption.AllDirectories)
            .OrderBy(static path => path, StringComparer.OrdinalIgnoreCase)
            .ToArray();
        if (scenarioFiles.Length == 0)
        {
            // NOTE(review): reported on stderr but still exits 0 — presumably "no scenarios"
            // is treated as a pass; confirm with the CI wiring.
            Console.Error.WriteLine($"No scenario.json files found under '{scenarioRoot}'.");
            return 0;
        }

        // --fixed-time pins the clock for deterministic snapshots.
        var timeProvider = options.FixedTime.HasValue
            ? new FixedTimeProvider(options.FixedTime.Value)
            : TimeProvider.System;
        // Null repositories: previews run fully in-memory without persistence.
        var snapshotStore = new PolicySnapshotStore(
            new NullPolicySnapshotRepository(),
            new NullPolicyAuditRepository(),
            timeProvider,
            _loggerFactory.CreateLogger<PolicySnapshotStore>());
        var previewService = new PolicyPreviewService(snapshotStore, _loggerFactory.CreateLogger<PolicyPreviewService>());
        var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
        {
            PropertyNameCaseInsensitive = true,
            ReadCommentHandling = JsonCommentHandling.Skip,
        };

        var summary = new List<ScenarioResult>();
        var success = true;
        foreach (var scenarioFile in scenarioFiles)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var scenarioText = await File.ReadAllTextAsync(scenarioFile, cancellationToken);
            var scenario = JsonSerializer.Deserialize<PolicySimulationScenario>(scenarioText, serializerOptions);
            if (scenario is null)
            {
                Console.Error.WriteLine($"Failed to deserialize scenario '{scenarioFile}'.");
                success = false;
                continue;
            }
            var policyPath = PolicySimulationSmokePaths.ResolvePolicyPath(scenario.PolicyPath, repoRoot);
            if (policyPath is null)
            {
                Console.Error.WriteLine($"Policy path '{scenario.PolicyPath}' is relative; provide --repo-root or use an absolute path.");
                success = false;
                continue;
            }
            if (!File.Exists(policyPath))
            {
                Console.Error.WriteLine($"Policy file '{scenario.PolicyPath}' referenced by scenario '{scenario.Name}' does not exist.");
                success = false;
                continue;
            }
            var policyContent = await File.ReadAllTextAsync(policyPath, cancellationToken);
            var policyFormat = PolicySchema.DetectFormat(policyPath);
            var findings = scenario.Findings.Select(ToPolicyFinding).ToImmutableArray();
            var baseline = scenario.Baseline?.Select(ToPolicyVerdict).ToImmutableArray() ?? ImmutableArray<PolicyVerdict>.Empty;
            var request = new PolicyPreviewRequest(
                ImageDigest: $"sha256:simulation-{scenario.Name}",
                Findings: findings,
                BaselineVerdicts: baseline,
                SnapshotOverride: null,
                ProposedPolicy: new PolicySnapshotContent(
                    Content: policyContent,
                    Format: policyFormat,
                    Actor: "ci",
                    Source: "ci/simulation-smoke",
                    Description: $"CI simulation for scenario '{scenario.Name}'"));
            var response = await previewService.PreviewAsync(request, cancellationToken);
            var scenarioResult = PolicySimulationSmokeEvaluator.EvaluateScenario(scenario, response);
            summary.Add(scenarioResult);
            if (!scenarioResult.Success)
            {
                success = false;
            }
        }

        if (options.OutputDirectory is not null)
        {
            var outputDirectory = PolicySimulationSmokePaths.ResolveOutputDirectory(options.OutputDirectory, repoRoot);
            if (outputDirectory is null)
            {
                Console.Error.WriteLine("Output path is relative; provide --repo-root or use an absolute path.");
                return 64; // EX_USAGE
            }
            Directory.CreateDirectory(outputDirectory);
            var summaryPath = Path.Combine(outputDirectory, "policy-simulation-summary.json");
            var summaryJson = JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true });
            await File.WriteAllTextAsync(summaryPath, summaryJson, cancellationToken);
        }

        return success ? 0 : 1;
    }

    // Maps a deserialized scenario finding onto the policy engine's finding model;
    // severity is parsed case-insensitively and throws on unknown values.
    private static PolicyFinding ToPolicyFinding(ScenarioFinding finding)
    {
        var tags = finding.Tags is null ? ImmutableArray<string>.Empty : ImmutableArray.CreateRange(finding.Tags);
        var severity = Enum.Parse<PolicySeverity>(finding.Severity, ignoreCase: true);
        return new PolicyFinding(
            finding.FindingId,
            severity,
            finding.Environment,
            finding.Source,
            finding.Vendor,
            finding.License,
            finding.Image,
            finding.Repository,
            finding.Package,
            finding.Purl,
            finding.Cve,
            finding.Path,
            finding.LayerDigest,
            tags);
    }

    // Maps a baseline entry onto a PolicyVerdict; missing config version falls back
    // to the default scoring-config version.
    private static PolicyVerdict ToPolicyVerdict(ScenarioBaseline baseline)
    {
        var status = Enum.Parse<PolicyVerdictStatus>(baseline.Status, ignoreCase: true);
        var inputs = baseline.Inputs?.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableDictionary<string, double>.Empty;
        return new PolicyVerdict(
            baseline.FindingId,
            status,
            RuleName: baseline.RuleName,
            RuleAction: baseline.RuleAction,
            Notes: baseline.Notes,
            Score: baseline.Score,
            ConfigVersion: baseline.ConfigVersion ?? PolicyScoringConfig.Default.Version,
            Inputs: inputs,
            QuietedBy: null,
            Quiet: false,
            UnknownConfidence: null,
            ConfidenceBand: null,
            UnknownAgeDays: null,
            SourceTrust: null,
            Reachability: null);
    }
}
/// <summary>Compares a policy preview response against a scenario's expectations.</summary>
public static class PolicySimulationSmokeEvaluator
{
    /// <summary>
    /// Evaluates <paramref name="response"/> against the expected diffs declared by
    /// <paramref name="scenario"/>, accumulating every failure instead of stopping early.
    /// </summary>
    /// <param name="scenario">Scenario whose <c>ExpectedDiffs</c> are checked.</param>
    /// <param name="response">Preview response produced for the scenario.</param>
    /// <returns>A result whose <c>Success</c> flag is false when any expectation failed.</returns>
    public static ScenarioResult EvaluateScenario(PolicySimulationScenario scenario, PolicyPreviewResponse response)
    {
        var result = new ScenarioResult(scenario.Name);
        if (!response.Success)
        {
            result.Failures.Add("Preview failed.");
            return result with { Success = false, ChangedCount = response.ChangedCount };
        }

        // Index diffs by projected finding id. Built via indexer assignment (last wins)
        // rather than ToDictionary, which would throw ArgumentException if the preview
        // ever reported the same finding id twice and abort the whole smoke run.
        var diffs = new Dictionary<string, PolicyVerdictDiff>(StringComparer.OrdinalIgnoreCase);
        foreach (var diff in response.Diffs)
        {
            diffs[diff.Projected.FindingId] = diff;
        }

        foreach (var expected in scenario.ExpectedDiffs)
        {
            if (!diffs.TryGetValue(expected.FindingId, out var diff))
            {
                result.Failures.Add($"Expected finding '{expected.FindingId}' missing from diff.");
                continue;
            }
            var projectedStatus = diff.Projected.Status.ToString();
            result.ActualStatuses[expected.FindingId] = projectedStatus;
            if (!string.Equals(projectedStatus, expected.Status, StringComparison.OrdinalIgnoreCase))
            {
                result.Failures.Add($"Finding '{expected.FindingId}' expected status '{expected.Status}' but was '{projectedStatus}'.");
            }
        }

        // Record statuses for diffs the scenario did not explicitly expect so the
        // summary shows the full picture.
        foreach (var diff in diffs.Values)
        {
            if (!result.ActualStatuses.ContainsKey(diff.Projected.FindingId))
            {
                result.ActualStatuses[diff.Projected.FindingId] = diff.Projected.Status.ToString();
            }
        }

        return result with
        {
            Success = result.Failures.Count == 0,
            ChangedCount = response.ChangedCount
        };
    }
}
/// <summary>
/// Path-resolution helpers for the policy simulation smoke runner. Relative
/// scenario/policy paths are resolved against the repository root; the repo root
/// itself is located by walking upward looking for <c>src/Directory.Build.props</c>.
/// </summary>
public static class PolicySimulationSmokePaths
{
    /// <summary>
    /// Returns the repo root: the explicit value (normalized) when supplied,
    /// otherwise discovered from the working directory or the binary's directory.
    /// Null when no root can be found.
    /// </summary>
    public static string? ResolveRepoRoot(string? explicitRepoRoot)
    {
        if (!string.IsNullOrWhiteSpace(explicitRepoRoot))
        {
            return Path.GetFullPath(explicitRepoRoot);
        }

        return TryFindRepoRoot(Directory.GetCurrentDirectory())
            ?? TryFindRepoRoot(AppContext.BaseDirectory);
    }

    /// <summary>Resolves the scenario root; null when it is relative and no repo root is known.</summary>
    public static string? ResolveScenarioRoot(string scenarioRoot, string? repoRoot)
        => ResolveAgainstRepoRoot(scenarioRoot, repoRoot);

    /// <summary>Resolves the policy file path; null when it is relative and no repo root is known.</summary>
    public static string? ResolvePolicyPath(string policyPath, string? repoRoot)
        => ResolveAgainstRepoRoot(policyPath, repoRoot);

    /// <summary>
    /// Resolves the output directory. Unlike scenario/policy paths, a relative
    /// output directory falls back to the current directory instead of failing
    /// when no repo root is available.
    /// </summary>
    public static string? ResolveOutputDirectory(string outputDirectory, string? repoRoot)
    {
        if (Path.IsPathRooted(outputDirectory))
        {
            return Path.GetFullPath(outputDirectory);
        }

        var baseDirectory = !string.IsNullOrWhiteSpace(repoRoot) ? repoRoot : Directory.GetCurrentDirectory();
        return Path.GetFullPath(Path.Combine(baseDirectory, outputDirectory));
    }

    /// <summary>
    /// Walks from <paramref name="startDirectory"/> toward the filesystem root,
    /// returning the first directory containing <c>src/Directory.Build.props</c>,
    /// or null when none exists.
    /// </summary>
    public static string? TryFindRepoRoot(string startDirectory)
    {
        if (string.IsNullOrWhiteSpace(startDirectory))
        {
            return null;
        }

        var current = new DirectoryInfo(Path.GetFullPath(startDirectory));
        while (current is not null)
        {
            // The repo root is identified by the presence of src/Directory.Build.props.
            var candidate = Path.Combine(current.FullName, "src", "Directory.Build.props");
            if (File.Exists(candidate))
            {
                return current.FullName;
            }

            current = current.Parent;
        }

        return null;
    }

    // Shared logic for ResolveScenarioRoot/ResolvePolicyPath (they were duplicated):
    // absolute paths are normalized as-is; relative paths require a repo root and
    // are resolved against it, yielding null when none is available.
    private static string? ResolveAgainstRepoRoot(string path, string? repoRoot)
    {
        if (Path.IsPathRooted(path))
        {
            return Path.GetFullPath(path);
        }

        if (string.IsNullOrWhiteSpace(repoRoot))
        {
            return null;
        }

        return Path.GetFullPath(Path.Combine(repoRoot, path));
    }
}
/// <summary>
/// Parsing helpers for the policy simulation smoke runner.
/// </summary>
public static class PolicySimulationSmokeParsing
{
    /// <summary>
    /// Attempts to parse <paramref name="value"/> as an invariant-culture timestamp,
    /// treating unzoned values as UTC and normalizing the result to UTC.
    /// </summary>
    public static bool TryParseFixedTime(string value, out DateTimeOffset fixedTime)
    {
        const DateTimeStyles styles = DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal;
        return DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, styles, out fixedTime);
    }
}
/// <summary>
/// A <see cref="TimeProvider"/> pinned to a single instant, used to make
/// smoke runs deterministic. The instant is normalized to UTC at construction
/// and the local time zone is reported as UTC.
/// </summary>
public sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _utcInstant;

    public FixedTimeProvider(DateTimeOffset fixedTime) => _utcInstant = fixedTime.ToUniversalTime();

    /// <summary>Always returns the pinned UTC instant.</summary>
    public override DateTimeOffset GetUtcNow() => _utcInstant;

    public override TimeZoneInfo LocalTimeZone => TimeZoneInfo.Utc;
}

View File

@@ -0,0 +1,21 @@
using StellaOps.Policy;
namespace StellaOps.Policy.Tools;
/// <summary>
/// Abstraction over a policy validation run so hosts can invoke validation
/// without depending directly on the concrete CLI wiring.
/// </summary>
public interface IPolicyValidationRunner
{
/// <summary>
/// Executes a validation run described by <paramref name="options"/>.
/// Returns an integer result — presumably a process exit code; confirm against the CLI implementation.
/// </summary>
Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken);
}
/// <summary>
/// Default <see cref="IPolicyValidationRunner"/> that forwards every run to a
/// <see cref="PolicyValidationCli"/> instance supplied at construction.
/// </summary>
public sealed class PolicyValidationRunner : IPolicyValidationRunner
{
    private readonly PolicyValidationCli _inner;

    public PolicyValidationRunner(PolicyValidationCli cli)
    {
        _inner = cli ?? throw new ArgumentNullException(nameof(cli));
    }

    /// <summary>Delegates directly to the wrapped CLI.</summary>
    public Task<int> RunAsync(PolicyValidationCliOptions options, CancellationToken cancellationToken)
    {
        return _inner.RunAsync(options, cancellationToken);
    }
}

View File

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Newtonsoft.Json" />
<PackageReference Include="NJsonSchema" />
<PackageReference Include="NJsonSchema.CodeGeneration.CSharp" />
<PackageReference Include="NJsonSchema.NewtonsoftJson" />
<PackageReference Include="System.CommandLine" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj" />
<ProjectReference Include="..\..\Scheduler\__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" />
</ItemGroup>
</Project>