Extend Vexer attestation/export stack and Concelier OSV fixes

This commit is contained in:
2025-10-16 19:44:10 +03:00
parent 46f7c807d3
commit cb3acb8c4a
103 changed files with 6852 additions and 1840 deletions

View File

@@ -112,9 +112,9 @@
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Core/TASKS.md | DONE (2025-10-15) | Team Vexer Core & Policy | VEXER-CORE-01-003 | Publish shared connector/exporter/attestation abstractions and deterministic query signature utilities for cache/attestation workflows. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | DONE (2025-10-15) | Team Vexer Policy | VEXER-POLICY-01-001 | Established policy options & snapshot provider covering baseline weights/overrides. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | DONE (2025-10-15) | Team Vexer Policy | VEXER-POLICY-01-002 | Policy evaluator now feeds consensus resolver with immutable snapshots. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | TODO | Team Vexer Policy | VEXER-POLICY-01-003 | Author policy diagnostics, CLI/WebService surfacing, and documentation updates. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | TODO | Team Vexer Policy | VEXER-POLICY-01-004 | Implement YAML/JSON schema validation and deterministic diagnostics for operator bundles. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | TODO | Team Vexer Policy | VEXER-POLICY-01-005 | Add policy change tracking, snapshot digests, and telemetry/logging hooks. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | DONE (2025-10-16) | Team Vexer Policy | VEXER-POLICY-01-003 | Author policy diagnostics, CLI/WebService surfacing, and documentation updates. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | DONE (2025-10-16) | Team Vexer Policy | VEXER-POLICY-01-004 | Implement YAML/JSON schema validation and deterministic diagnostics for operator bundles. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Policy/TASKS.md | DONE (2025-10-16) | Team Vexer Policy | VEXER-POLICY-01-005 | Add policy change tracking, snapshot digests, and telemetry/logging hooks. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Storage.Mongo/TASKS.md | DONE (2025-10-15) | Team Vexer Storage | VEXER-STORAGE-01-001 | Mongo mapping registry plus raw/export entities and DI extensions in place. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Storage.Mongo/TASKS.md | TODO | Team Vexer Storage | VEXER-STORAGE-01-004 | Build provider/consensus/cache class maps and related collections. |
| Sprint 5 | Vexer Core Foundations | src/StellaOps.Vexer.Export/TASKS.md | DONE (2025-10-15) | Team Vexer Export | VEXER-EXPORT-01-001 | Export engine delivers cache lookup, manifest creation, and policy integration. |
@@ -134,3 +134,17 @@
| Sprint 6 | Vexer Ingest & Formats | src/StellaOps.Vexer.Connectors.Ubuntu.CSAF/TASKS.md | TODO | Team Vexer Connectors Ubuntu | VEXER-CONN-UBUNTU-01-001 | Implement Ubuntu CSAF discovery and channel selection for USN ingestion. |
| Sprint 6 | Vexer Ingest & Formats | src/StellaOps.Vexer.Connectors.OCI.OpenVEX.Attest/TASKS.md | TODO | Team Vexer Connectors OCI | VEXER-CONN-OCI-01-001 | Wire OCI discovery/auth to fetch OpenVEX attestations for configured images. |
| Sprint 6 | Vexer Ingest & Formats | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI | VEXER-CLI-01-001 | Add `vexer` CLI verbs bridging to WebService with consistent auth and offline UX. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Core/TASKS.md | TODO | Team Vexer Core & Policy | VEXER-CORE-02-001 | Context signal schema prep — extend consensus models with severity/KEV/EPSS fields and update canonical serializers. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Policy/TASKS.md | TODO | Team Vexer Policy | VEXER-POLICY-02-001 | Scoring coefficients & weight ceilings — add α/β options, weight boosts, and validation guidance. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Storage.Mongo/TASKS.md | TODO | Team Vexer Storage | VEXER-STORAGE-02-001 | Statement events & scoring signals — create immutable VEX statement store plus consensus extensions with indexes/migrations. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.WebService/TASKS.md | TODO | Team Vexer WebService | VEXER-WEB-01-004 | Resolve API & signed responses — expose `/vexer/resolve`, return signed consensus/score envelopes, document auth. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Attestation/TASKS.md | TODO | Team Vexer Attestation | VEXER-ATTEST-01-002 | Rekor v2 client integration — ship transparency log client with retries and offline queue. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Worker/TASKS.md | TODO | Team Vexer Worker | VEXER-WORKER-01-004 | TTL refresh & stability damper — schedule re-resolve loops and guard against status flapping. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Vexer.Export/TASKS.md | TODO | Team Vexer Export | VEXER-EXPORT-01-005 | Score & resolve envelope surfaces — include signed consensus/score artifacts in exports. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Feedser.Core/TASKS.md | TODO | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-001 | Advisory event log & asOf queries — surface immutable statements and replay capability. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Feedser.Core/TASKS.md | TODO | Team Core Engine & Data Science | FEEDCORE-ENGINE-07-002 | Noise prior computation service — learn false-positive priors and expose deterministic summaries. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | TODO | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-07-001 | Advisory statement & conflict collections — provision Mongo schema/indexes for event-sourced merge. |
| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Feedser.Merge/TASKS.md | TODO | BE-Merge | FEEDMERGE-ENGINE-07-001 | Conflict sets & explainers — persist conflict materialization and replay hashes for merge decisions. |
| Sprint 8 | Mongo strengthening | src/StellaOps.Feedser.Storage.Mongo/TASKS.md | TODO | Team Normalization & Storage Backbone | FEEDSTORAGE-MONGO-08-001 | Causal-consistent Feedser storage sessions<br>Ensure `AddMongoStorage` registers a scoped session facilitator (causal consistency + majority concerns), update repositories to accept optional session handles, and add integration coverage proving read-your-write and monotonic reads across a replica set/election scenario. |
| Sprint 8 | Mongo strengthening | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Storage Guild | AUTHSTORAGE-MONGO-08-001 | Harden Authority Mongo usage<br>Introduce scoped MongoDB sessions with `writeConcern`/`readConcern` majority defaults, flow the session through stores used in mutations + follow-up reads, and document middleware pattern for web/API & GraphQL layers. |
| Sprint 8 | Mongo strengthening | src/StellaOps.Vexer.Storage.Mongo/TASKS.md | TODO | Team Vexer Storage | VEXER-STORAGE-MONGO-08-001 | Causal consistency for Vexer repositories<br>Register Mongo options with majority defaults, push session-aware overloads through raw/export/consensus/cache stores, and extend migration/tests to validate causal reads after writes (including GridFS-backed content) under replica-set failover. |

View File

@@ -26,6 +26,7 @@ MongoDB acts as the canonical store; collections (with logical responsibilities)
- `vex.consensus` consensus projections per `(vulnId, productKey)` capturing rollup status, source weights, conflicts, and policy revision.
- `vex.exports` export manifests containing artifact digests, cache metadata, and attestation pointers.
- `vex.cache` index from `querySignature`/`format` to export digest for fast reuse.
- `vex.migrations` tracks applied storage migrations (index bootstrap, future schema updates).
GridFS is used for large raw payloads when necessary, and artifact stores (S3/MinIO/file) hold serialized exports referenced by `vex.exports`.
@@ -54,6 +55,7 @@ Policy snapshots are immutable and versioned so consensus records capture the po
- JSON serialization uses `VexCanonicalJsonSerializer`, enforcing property ordering and camelCase naming for reproducible snapshots and test fixtures.
- `VexQuerySignature` produces canonical filter/order strings and SHA-256 digests, enabling cache keys shared across services.
- Export manifests reuse cached artifacts when the same signature/format is requested unless `ForceRefresh` is explicitly set.
- For scoring multiple sources on the same VEX topic, see `VEXER_SCORRING.md`.
## 6. Observability & offline posture
@@ -68,5 +70,16 @@ Policy snapshots are immutable and versioned so consensus records capture the po
- Build WebService endpoints (`/vexer/status`, `/vexer/claims`, `/vexer/exports`) plus CLI verbs mirroring Feedser patterns.
- Provide CSAF, CycloneDX VEX, and OpenVEX normalizers along with vendor-specific connectors (Red Hat, Cisco, SUSE, MSRC, Oracle, Ubuntu, OCI attestation).
- Extend policy diagnostics with schema validation, change tracking, and operator-facing diff reports.
- Mongo bootstrapper runs ordered migrations (`vex.migrations`) to ensure indexes for raw documents, providers, consensus snapshots, exports, and cache entries.
## Appendix A Policy diagnostics workflow
- `StellaOps.Vexer.Policy` now exposes `IVexPolicyDiagnostics`, producing deterministic diagnostics reports with timestamp, severity counts, active provider overrides, and the full issue list surfaced by `IVexPolicyProvider`.
- CLI/WebService layers should call `IVexPolicyDiagnostics.GetDiagnostics()` to display operator-friendly summaries (`vexer policy diagnostics` and `/vexer/policy/diagnostics` are the planned entry points).
- Recommendations in the report guide operators to resolve blocking errors, review warnings, and audit override usage before consensus runs—embed them directly in UX copy instead of re-deriving logic.
- Export/consensus telemetry should log the diagnostic `Version` alongside `policyRevisionId` so dashboards can correlate policy changes with consensus decisions.
- Offline installations can persist the diagnostics report (JSON) in the Offline Kit to document policy headroom during audits; the output is deterministic and diff-friendly.
- Use `VexPolicyBinder` when ingesting operator-supplied YAML/JSON bundles; it normalizes weight/override values, reports deterministic issues, and returns the consensus-ready `VexConsensusPolicyOptions` used by `VexPolicyProvider`.
- Reload telemetry emits `vex.policy.reloads` (tags: `revision`, `version`, `issues`) whenever a new digest is observed—feed this into dashboards to correlate policy changes with consensus outcomes.
This architecture keeps Vexer aligned with StellaOps' deterministic, offline-operable design while layering VEX-specific consensus and attestation capabilities on top of the Feedser foundations.

83
docs/VEXER_SCORRING.md Normal file
View File

@@ -0,0 +1,83 @@
## Status
This document tracks the future-looking risk scoring model for Vexer. The calculation below is not active yet; Sprint 7 work will add the required schema fields, policy controls, and services. Until that ships, Vexer emits consensus statuses without numeric scores.
## Scoring model (target state)
**S = Gate(VEX_status) × W_trust(source) × [Severity_base × (1 + α·KEV + β·EPSS)]**
* **Gate(VEX_status)**: `affected`/`under_investigation` → 1, `not_affected`/`fixed` → 0. A trusted “not affected” or “fixed” still zeroes the score.
* **W_trust(source)**: normalized policy weight (baseline 0–1). Policies may opt into >1 boosts for signed vendor feeds once Phase 1 closes.
* **Severity_base**: canonical numeric severity from Feedser (CVSS or org-defined scale).
* **KEV flag**: 0/1 boost when CISA Known Exploited Vulnerabilities applies.
* **EPSS**: probability [0,1]; bounded multiplier.
* **α, β**: configurable coefficients (default α=0.25, β=0.5) stored in policy.
Safeguards: freeze boosts when product identity is unknown, clamp outputs ≥0, and log every factor in the audit trail.
## Implementation roadmap
| Phase | Scope | Artifacts |
| --- | --- | --- |
| **Phase 1 — Schema foundations** | Extend Vexer consensus/claims and Feedser canonical advisories with severity, KEV, EPSS, and expose α/β + weight ceilings in policy. | Sprint 7 tasks `VEXER-CORE-02-001`, `VEXER-POLICY-02-001`, `VEXER-STORAGE-02-001`, `FEEDCORE-ENGINE-07-001`. |
| **Phase 2 — Deterministic score engine** | Implement a scoring component that executes alongside consensus and persists score envelopes with hashes. | Planned task `VEXER-CORE-02-002` (backlog). |
| **Phase 3 — Surfacing & enforcement** | Expose scores via WebService/CLI, integrate with Feedser noise priors, and enforce policy-based suppressions. | To be scheduled after Phase 2. |
## Data model (after Phase 1)
```json
{
"vulnerabilityId": "CVE-2025-12345",
"product": "pkg:name@version",
"consensus": {
"status": "affected",
"policyRevisionId": "rev-12",
"policyDigest": "0D9AEC…"
},
"signals": {
"severity": {"scheme": "CVSS:3.1", "score": 7.5},
"kev": true,
"epss": 0.40
},
"policy": {
"weight": 1.15,
"alpha": 0.25,
"beta": 0.5
},
"score": {
"value": 10.8,
"generatedAt": "2025-11-05T14:12:30Z",
"audit": [
"gate:affected",
"weight:1.15",
"severity:7.5",
"kev:1",
"epss:0.40"
]
}
}
```
## Operational guidance
* **Inputs**: Feedser delivers severity/KEV/EPSS via the advisory event log; Vexer connectors load VEX statements. Policy owns trust tiers and coefficients.
* **Processing**: the scoring engine (Phase 2) runs next to consensus, storing results with deterministic hashes so exports and attestations can reference them.
* **Consumption**: WebService/CLI will return consensus plus score; scanners may suppress findings only when policy-authorized VEX gating and signed score envelopes agree.
## Pseudocode (Phase 2 preview)
```python
def risk_score(gate, weight, severity, kev, epss, alpha, beta, freeze_boosts=False):
if gate == 0:
return 0
if freeze_boosts:
kev, epss = 0, 0
boost = 1 + alpha * kev + beta * epss
return max(0, weight * severity * boost)
```
## FAQ
* **Can operators opt out?** Set α=β=0 or keep weights ≤1.0 via policy.
* **What about missing signals?** Treat them as zero and log the omission.
* **When will this ship?** Phase 1 is planned for Sprint 7; later phases depend on connector coverage and attestation delivery.

View File

@@ -1,6 +1,6 @@
# Feedser GHSA Connector Operations Runbook
_Last updated: 2025-10-12_
_Last updated: 2025-10-16_
## 1. Overview
The GitHub Security Advisories (GHSA) connector pulls advisory metadata from the GitHub REST API `/security/advisories` endpoint. GitHub enforces both primary and secondary rate limits, so operators must monitor usage and configure retries to avoid throttling incidents.
@@ -114,3 +114,10 @@ When enabling GHSA the first time, run a staged backfill:
- Prometheus: `ghsa_ratelimit_remaining_bucket` (from histogram) use `histogram_quantile(0.99, ...)` to trend capacity.
- VictoriaMetrics: `LAST_over_time(ghsa_ratelimit_remaining_sum[5m])` for simple last-value graphs.
- Grafana: stack remaining + used to visualise total limit per resource.
## 8. Canonical metric fallback analytics
When GitHub omits CVSS vectors/scores, the connector now assigns a deterministic canonical metric id in the form `ghsa:severity/<level>` and publishes it to Merge so severity precedence still resolves against GHSA even without CVSS data.
- Metric: `ghsa.map.canonical_metric_fallbacks` (counter) with tags `severity`, `canonical_metric_id`, `reason=no_cvss`.
- Monitor the counter alongside Merge parity checks; a sudden spike suggests GitHub is shipping advisories without vectors and warrants cross-checking downstream exporters.
- Because the canonical id feeds Merge, parity dashboards should overlay this metric to confirm fallback advisories continue to merge ahead of downstream sources when GHSA supplies more recent data.

View File

@@ -0,0 +1,24 @@
# Feedser OSV Connector Operations Notes
_Last updated: 2025-10-16_
The OSV connector ingests advisories from OSV.dev across OSS ecosystems. This note highlights the additional merge/export expectations introduced with the canonical metric fallback work in Sprint 4.
## 1. Canonical metric fallbacks
- When OSV omits CVSS vectors (common for CVSS v4-only payloads) the mapper now emits a deterministic canonical metric id in the form `osv:severity/<level>` and normalises the advisory severity to the same `<level>`.
- Metric: `osv.map.canonical_metric_fallbacks` (counter) with tags `severity`, `canonical_metric_id`, `ecosystem`, `reason=no_cvss`. Watch this alongside merge parity dashboards to catch spikes where OSV publishes severity-only advisories.
- Merge precedence still prefers GHSA over OSV; the shared severity-based canonical id keeps Merge/export parity deterministic even when only OSV supplies severity data.
## 2. CWE provenance
- `database_specific.cwe_ids` now populates provenance decision reasons for every mapped weakness. Expect `decisionReason="database_specific.cwe_ids"` on OSV weakness provenance and confirm exporters preserve the value.
- If OSV ever attaches `database_specific.cwe_notes`, the connector will surface the joined note string in `decisionReason` instead of the default marker.
## 3. Dashboards & alerts
- Extend existing merge dashboards with the new counter:
- Overlay `sum(osv.map.canonical_metric_fallbacks{ecosystem=~".+"})` with Merge severity overrides to confirm fallback advisories are reconciling cleanly.
- Alert when the 1-hour sum exceeds 50 for any ecosystem; baseline volume is currently <5 per day (mostly GHSA mirrors emitting CVSS v4 only).
- Exporters already surface `canonicalMetricId`; no schema change is required, but ORAS/Trivy bundles should be spot-checked after deploying the connector update.
## 4. Runbook updates
- Fixture parity suites (`osv-ghsa.*`) now assert the fallback id and provenance notes. Regenerate via `dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj`.
- When investigating merge severity conflicts, include the fallback counter and confirm OSV advisories carry the expected `osv:severity/<level>` id before raising connector bugs.

View File

@@ -19,5 +19,6 @@
| AUTHCORE-BUILD-OPENIDDICT | DONE (2025-10-14) | Authority Core | SEC2.HOST | Adapt host/audit handlers for OpenIddict 6.4 API surface (no `OpenIddictServerTransaction`) and restore Authority solution build. | ✅ Build `dotnet build src/StellaOps.Authority.sln` succeeds; ✅ Audit correlation + tamper logging verified under new abstractions; ✅ Tests updated. |
| AUTHCORE-STORAGE-DEVICE-TOKENS | DONE (2025-10-14) | Authority Core, Storage Guild | AUTHCORE-BUILD-OPENIDDICT | Reintroduce `AuthorityTokenDeviceDocument` + projections removed during refactor so storage layer compiles. | ✅ Document type restored with mappings/migrations; ✅ Storage tests cover device artifacts; ✅ Authority solution build green. |
| AUTHCORE-BOOTSTRAP-INVITES | DONE (2025-10-14) | Authority Core, DevOps | AUTHCORE-STORAGE-DEVICE-TOKENS | Wire bootstrap invite cleanup service against restored document schema and re-enable lifecycle tests. | ✅ `BootstrapInviteCleanupService` passes integration tests; ✅ Operator guide updated if behavior changes; ✅ Build/test matrices green. |
| AUTHSTORAGE-MONGO-08-001 | TODO | Authority Core & Storage Guild | — | Harden Mongo session usage with causal consistency for mutations and follow-up reads. | • Scoped middleware/service creates `IClientSessionHandle` with causal consistency + majority read/write concerns<br>• Stores accept optional session parameter and reuse it for write + immediate reads<br>• GraphQL/HTTP pipelines updated to flow session through post-mutation queries<br>• Replica-set integration test exercises primary election and verifies read-your-write guarantees |
> Update status columns (TODO / DOING / DONE / BLOCKED) together with code changes. Always run `dotnet test src/StellaOps.Authority.sln` when touching host logic.

View File

@@ -16,3 +16,5 @@
|FEEDCORE-ENGINE-03-002 Field precedence and tie-breaker map|BE-Core|Merge|DONE field precedence and freshness overrides enforced via `FieldPrecedence` map with tie-breakers and analytics capture. **Reminder:** Storage/Merge owners review precedence overrides when onboarding new feeds to ensure `decisionReason` tagging stays consistent.|
|Canonical merger parity for description/CWE/canonical metric|BE-Core|Models|DONE (2025-10-15) merger now populates description/CWEs/canonical metric id with provenance and regression tests cover the new decisions.|
|Reference normalization & freshness instrumentation cleanup|BE-Core, QA|Models|DONE (2025-10-15) reference keys normalized, freshness overrides applied to union fields, and new tests assert decision logging.|
|FEEDCORE-ENGINE-07-001 Advisory event log & asOf queries|Team Core Engine & Storage Analytics|FEEDSTORAGE-DATA-07-001|TODO Introduce immutable advisory statement events, expose `asOf` query surface for merge/export pipelines, and document determinism guarantees for replay.|
|FEEDCORE-ENGINE-07-002 Noise prior computation service|Team Core Engine & Data Science|FEEDCORE-ENGINE-07-001|TODO Build rule-based learner capturing false-positive priors per package/env, persist summaries, and expose APIs for Vexer/scan suppressors with reproducible statistics.|

View File

@@ -18,3 +18,4 @@
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `feedser.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.|
|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.|
|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.|
|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|TODO Persist conflict sets referencing advisory statements, output rule/explainer payloads with replay hashes, and add integration tests covering deterministic `asOf` evaluations.|

View File

@@ -89,7 +89,7 @@
"CVE-2025-4242",
"GHSA-qqqq-wwww-eeee"
],
"canonicalMetricId": null,
"canonicalMetricId": "ghsa:severity/high",
"credits": [
{
"displayName": "maintainer-team",
@@ -192,4 +192,4 @@
"severity": "high",
"summary": "Container escape in conflict-package",
"title": "Container escape in conflict-package"
}
}

View File

@@ -76,6 +76,8 @@ public sealed class GhsaConflictFixtureTests
};
var advisory = GhsaMapper.Map(dto, document, recordedAt);
Assert.Equal("ghsa:severity/high", advisory.CanonicalMetricId);
Assert.True(advisory.CvssMetrics.IsEmpty);
var snapshot = SnapshotSerializer.ToSnapshot(advisory).Replace("\r\n", "\n").TrimEnd();
var expectedPath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "conflict-ghsa.canonical.json");

View File

@@ -0,0 +1,53 @@
using StellaOps.Feedser.Source.Ghsa.Internal;
using StellaOps.Feedser.Storage.Mongo.Documents;
namespace StellaOps.Feedser.Source.Ghsa.Tests;
/// <summary>
/// Tests for <see cref="GhsaMapper"/> covering the severity fallback path used
/// when GitHub supplies a severity label but omits the CVSS vector and score.
/// </summary>
public sealed class GhsaMapperTests
{
    [Fact]
    public void Map_WhenCvssVectorMissing_UsesSeverityFallback()
    {
        // Fixed clock keeps the fixture deterministic.
        var recordedAt = new DateTimeOffset(2025, 4, 10, 12, 0, 0, TimeSpan.Zero);

        var sourceDocument = new DocumentRecord(
            Id: Guid.Parse("d7814678-3c3e-4e63-98c4-68e2f6d7ba6f"),
            SourceName: GhsaConnectorPlugin.SourceName,
            Uri: "https://github.com/advisories/GHSA-fallback-test",
            FetchedAt: recordedAt.AddHours(-2),
            Sha256: "sha256-ghsa-fallback-test",
            Status: "completed",
            ContentType: "application/json",
            Headers: null,
            Metadata: null,
            Etag: "\"etag-ghsa-fallback\"",
            LastModified: recordedAt.AddHours(-3),
            GridFsId: null);

        // GHSA record whose top-level severity is absent and whose CVSS block
        // carries only a severity label — no vector string and no score.
        var record = new GhsaRecordDto
        {
            GhsaId = "GHSA-fallback-test",
            Summary = "Severity-only GHSA advisory",
            Description = "GHSA record where GitHub omitted CVSS vector/score.",
            Severity = null,
            PublishedAt = recordedAt.AddDays(-3),
            UpdatedAt = recordedAt.AddDays(-1),
            Aliases = new[] { "GHSA-fallback-test" },
            References = Array.Empty<GhsaReferenceDto>(),
            Affected = Array.Empty<GhsaAffectedDto>(),
            Credits = Array.Empty<GhsaCreditDto>(),
            Cwes = Array.Empty<GhsaWeaknessDto>(),
            Cvss = new GhsaCvssDto
            {
                Severity = "CRITICAL",
                Score = null,
                VectorString = null,
            },
        };

        var advisory = GhsaMapper.Map(record, sourceDocument, recordedAt);

        // The CVSS severity label is normalised and reused for the canonical
        // metric fallback id; no CVSS metrics are produced.
        Assert.Equal("critical", advisory.Severity);
        Assert.Equal("ghsa:severity/critical", advisory.CanonicalMetricId);
        Assert.True(advisory.CvssMetrics.IsEmpty);
    }
}

View File

@@ -381,6 +381,22 @@ public sealed class GhsaConnector : IFeedConnector
var advisory = GhsaMapper.Map(dto, document, dtoRecord.ValidatedAt);
if (advisory.CvssMetrics.IsEmpty && !string.IsNullOrWhiteSpace(advisory.CanonicalMetricId))
{
var fallbackSeverity = string.IsNullOrWhiteSpace(advisory.Severity)
? "unknown"
: advisory.Severity!;
_diagnostics.CanonicalMetricFallback(advisory.CanonicalMetricId!, fallbackSeverity);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"GHSA {GhsaId} emitted canonical metric fallback {CanonicalMetricId} (severity {Severity})",
advisory.AdvisoryKey,
advisory.CanonicalMetricId,
fallbackSeverity);
}
}
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);

View File

@@ -23,6 +23,7 @@ public sealed class GhsaDiagnostics : IDisposable
private readonly Histogram<double> _rateLimitHeadroomPct;
private readonly ObservableGauge<double> _rateLimitHeadroomGauge;
private readonly Counter<long> _rateLimitExhausted;
private readonly Counter<long> _canonicalMetricFallbacks;
private readonly object _rateLimitLock = new();
private GhsaRateLimitSnapshot? _lastRateLimitSnapshot;
private readonly Dictionary<(string Phase, string? Resource), GhsaRateLimitSnapshot> _rateLimitSnapshots = new();
@@ -44,6 +45,7 @@ public sealed class GhsaDiagnostics : IDisposable
_rateLimitHeadroomPct = _meter.CreateHistogram<double>("ghsa.ratelimit.headroom_pct", unit: "percent");
_rateLimitHeadroomGauge = _meter.CreateObservableGauge("ghsa.ratelimit.headroom_pct_current", ObserveHeadroom, unit: "percent");
_rateLimitExhausted = _meter.CreateCounter<long>("ghsa.ratelimit.exhausted", unit: "events");
_canonicalMetricFallbacks = _meter.CreateCounter<long>("ghsa.map.canonical_metric_fallbacks", unit: "advisories");
}
public void FetchAttempt() => _fetchAttempts.Add(1);
@@ -100,6 +102,13 @@ public sealed class GhsaDiagnostics : IDisposable
internal void RateLimitExhausted(string phase)
=> _rateLimitExhausted.Add(1, new KeyValuePair<string, object?>("phase", phase));
public void CanonicalMetricFallback(string canonicalMetricId, string severity)
=> _canonicalMetricFallbacks.Add(
1,
new KeyValuePair<string, object?>("canonical_metric_id", canonicalMetricId),
new KeyValuePair<string, object?>("severity", severity),
new KeyValuePair<string, object?>("reason", "no_cvss"));
internal GhsaRateLimitSnapshot? GetLastRateLimitSnapshot()
{
lock (_rateLimitLock)

View File

@@ -57,7 +57,19 @@ internal static class GhsaMapper
var weaknesses = CreateWeaknesses(dto.Cwes, recordedAt);
var cvssMetrics = CreateCvssMetrics(dto.Cvss, recordedAt, out var cvssSeverity, out var canonicalMetricId);
var severity = SeverityNormalization.Normalize(dto.Severity) ?? cvssSeverity;
var severityHint = SeverityNormalization.Normalize(dto.Severity);
var cvssSeverityHint = SeverityNormalization.Normalize(dto.Cvss?.Severity);
var severity = severityHint ?? cvssSeverity ?? cvssSeverityHint;
if (canonicalMetricId is null)
{
var fallbackSeverity = severityHint ?? cvssSeverityHint ?? cvssSeverity;
if (!string.IsNullOrWhiteSpace(fallbackSeverity))
{
canonicalMetricId = BuildSeverityCanonicalMetricId(fallbackSeverity);
}
}
var summary = dto.Summary ?? dto.Description;
var description = Validation.TrimToNull(dto.Description);
@@ -81,6 +93,9 @@ internal static class GhsaMapper
canonicalMetricId: canonicalMetricId);
}
private static string BuildSeverityCanonicalMetricId(string severity)
=> $"{GhsaConnectorPlugin.SourceName}:severity/{severity}";
private static AdvisoryReference? CreateReference(GhsaReferenceDto reference, DateTimeOffset recordedAt)
{
if (string.IsNullOrWhiteSpace(reference.Url) || !Validation.LooksLikeHttpUrl(reference.Url))

View File

@@ -16,4 +16,4 @@
|FEEDCONN-GHSA-02-005 Quota monitoring hardening|BE-Conn-GHSA, Observability|Source.Common metrics|**DONE (2025-10-12)** Diagnostics expose headroom histograms/gauges, warning logs dedupe below the configured threshold, and the ops runbook gained alerting and mitigation guidance.|
|FEEDCONN-GHSA-02-006 Scheduler rollout integration|BE-Conn-GHSA, Ops|Job scheduler|**DONE (2025-10-12)** Dependency routine tests assert cron/timeouts, and the runbook highlights cron overrides plus backoff toggles for staged rollouts.|
|FEEDCONN-GHSA-04-003 Description/CWE/metric parity rollout|BE-Conn-GHSA|Models, Core|**DONE (2025-10-15)** Mapper emits advisory description, CWE weaknesses, and canonical CVSS metric id with updated fixtures (`osv-ghsa.osv.json` parity suite) and connector regression covers the new fields. Reported completion to Merge coordination.|
|FEEDCONN-GHSA-04-004 Canonical metric fallback coverage|BE-Conn-GHSA|Models, Merge|TODO Ensure canonical metric ids remain populated when GitHub omits CVSS vectors/scores; add fixtures capturing severity-only advisories, document precedence with Merge, and emit analytics to track fallback usage.|
|FEEDCONN-GHSA-04-004 Canonical metric fallback coverage|BE-Conn-GHSA|Models, Merge|**DONE (2025-10-16)** Ensure canonical metric ids remain populated when GitHub omits CVSS vectors/scores; add fixtures capturing severity-only advisories, document precedence with Merge, and emit analytics to track fallback usage.<br>2025-10-16: Mapper now emits `ghsa:severity/<level>` canonical ids when vectors are missing, diagnostics expose `ghsa.map.canonical_metric_fallbacks`, conflict/mapper fixtures updated, and runbook documents Merge precedence. Tests: `dotnet test src/StellaOps.Feedser.Source.Ghsa.Tests/StellaOps.Feedser.Source.Ghsa.Tests.csproj`.|

View File

@@ -124,6 +124,46 @@ public sealed class OsvMapperTests
Assert.Equal("3.1", advisory.CvssMetrics[0].Version);
}
[Fact]
public void Map_AssignsSeverityFallbackWhenCvssVectorUnsupported()
{
    // database_specific payload carrying the severity label and CWE ids the
    // mapper falls back to when no parsable CVSS vector is available.
    using var databaseSpecific = JsonDocument.Parse("""
        {
            "severity": "MODERATE",
            "cwe_ids": ["CWE-290"]
        }
        """);

    // Advisory whose only severity entry is a CVSS v4 vector, which the mapper
    // cannot turn into CvssMetrics.
    var vulnerability = new OsvVulnerabilityDto
    {
        Id = "OSV-CVSS4",
        Summary = "Severity-only advisory",
        Details = "OSV entry that lacks a parsable CVSS vector.",
        Published = DateTimeOffset.UtcNow.AddDays(-10),
        Modified = DateTimeOffset.UtcNow.AddDays(-5),
        DatabaseSpecific = databaseSpecific.RootElement,
        Severity = new[]
        {
            new OsvSeverityDto
            {
                Type = "CVSS_V4",
                Score = "CVSS:4.0/AV:N/AC:H/AT:N/PR:N/UI:N/VC:L/VI:L/VA:N/SC:N/SI:N/SA:N"
            }
        }
    };

    var (document, dtoRecord) = CreateDocumentAndDtoRecord(vulnerability, "PyPI");

    var advisory = OsvMapper.Map(vulnerability, document, dtoRecord, "PyPI");

    // Severity normalises to "medium" and the canonical metric id takes the
    // deterministic severity-based fallback form.
    Assert.True(advisory.CvssMetrics.IsEmpty);
    Assert.Equal("medium", advisory.Severity);
    Assert.Equal("osv:severity/medium", advisory.CanonicalMetricId);

    // CWE provenance should record that the id came from database_specific.
    var weakness = Assert.Single(advisory.Cwes);
    var weaknessProvenance = Assert.Single(weakness.Provenance);
    Assert.Equal("database_specific.cwe_ids", weaknessProvenance.DecisionReason);
}
[Theory]
[InlineData("Go", "github.com/example/project", "pkg:golang/github.com/example/project")]
[InlineData("PyPI", "social_auth_app_django", "pkg:pypi/social-auth-app-django")]

View File

@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
namespace StellaOps.Feedser.Source.Osv.Internal;
/// <summary>
/// Connector-specific diagnostics for OSV mapping.
/// </summary>
public sealed class OsvDiagnostics : IDisposable
{
    private const string MeterName = "StellaOps.Feedser.Source.Osv";
    private const string MeterVersion = "1.0.0";

    private readonly Meter _meter = new(MeterName, MeterVersion);
    private readonly Counter<long> _canonicalMetricFallbacks;

    public OsvDiagnostics()
    {
        _canonicalMetricFallbacks = _meter.CreateCounter<long>(
            "osv.map.canonical_metric_fallbacks",
            unit: "advisories");
    }

    /// <summary>
    /// Records one advisory that received a severity-derived canonical metric id
    /// because no CVSS vector was available.
    /// </summary>
    public void CanonicalMetricFallback(string canonicalMetricId, string severity, string? ecosystem)
    {
        var ecosystemTag = string.IsNullOrWhiteSpace(ecosystem) ? "unknown" : ecosystem;
        _canonicalMetricFallbacks.Add(
            1,
            new KeyValuePair<string, object?>("canonical_metric_id", canonicalMetricId),
            new KeyValuePair<string, object?>("severity", severity),
            new KeyValuePair<string, object?>("ecosystem", ecosystemTag),
            new KeyValuePair<string, object?>("reason", "no_cvss"));
    }

    public void Dispose() => _meter.Dispose();
}

View File

@@ -68,11 +68,22 @@ internal static class OsvMapper
var credits = BuildCredits(dto, recordedAt);
var affectedPackages = BuildAffectedPackages(dto, ecosystem, recordedAt);
var cvssMetrics = BuildCvssMetrics(dto, recordedAt, out var severity);
var databaseSpecificSeverity = ExtractDatabaseSpecificSeverity(dto.DatabaseSpecific);
if (severity is null)
{
severity = databaseSpecificSeverity;
}
var weaknesses = BuildWeaknesses(dto, recordedAt);
var canonicalMetricId = cvssMetrics.Count > 0
? $"{cvssMetrics[0].Version}|{cvssMetrics[0].Vector}"
: null;
if (canonicalMetricId is null && !string.IsNullOrWhiteSpace(severity))
{
canonicalMetricId = BuildSeverityCanonicalMetricId(severity);
}
var normalizedDescription = DescriptionNormalizer.Normalize(new[]
{
new LocalizedText(dto.Details, "en"),
@@ -106,7 +117,10 @@ internal static class OsvMapper
descriptionText,
weaknesses,
canonicalMetricId);
}
}
// Canonical metric id for severity-only advisories (no parsable CVSS vector),
// e.g. "osv:severity/medium"; prefixed with the connector source name so Merge
// can distinguish fallback ids from vector-derived ones.
private static string BuildSeverityCanonicalMetricId(string severity)
    => $"{OsvConnectorPlugin.SourceName}:severity/{severity}";
private static IEnumerable<string> BuildAliases(OsvVulnerabilityDto dto)
{
@@ -509,7 +523,8 @@ internal static class OsvMapper
"weakness",
identifier,
recordedAt,
new[] { ProvenanceFieldMasks.Weaknesses });
new[] { ProvenanceFieldMasks.Weaknesses },
decisionReason: GetCweDecisionReason(dto.DatabaseSpecific, identifier));
var provenanceArray = ImmutableArray.Create(provenance);
list.Add(new AdvisoryWeakness(
@@ -550,6 +565,78 @@ internal static class OsvMapper
return digits.Length == 0 ? null : $"https://cwe.mitre.org/data/definitions/{digits}.html";
}
// Pulls a normalized severity from database_specific.severity when it is a
// plain string; returns null for any other shape.
private static string? ExtractDatabaseSpecificSeverity(JsonElement databaseSpecific)
{
    if (databaseSpecific.ValueKind is not JsonValueKind.Object
        || !databaseSpecific.TryGetProperty("severity", out var severityElement)
        || severityElement.ValueKind is not JsonValueKind.String)
    {
        return null;
    }

    return SeverityNormalization.Normalize(severityElement.GetString());
}
// Decision reason for a CWE weakness: prefer human-written cwe_notes, else fall
// back to a marker naming the database_specific.cwe_ids origin.
private static string? GetCweDecisionReason(JsonElement databaseSpecific, string identifier)
{
    if (databaseSpecific.ValueKind != JsonValueKind.Object)
    {
        return null;
    }

    var notes = databaseSpecific.TryGetProperty("cwe_notes", out var notesElement)
        ? NormalizeCweNotes(notesElement)
        : null;
    if (!string.IsNullOrWhiteSpace(notes))
    {
        return notes;
    }

    return databaseSpecific.TryGetProperty("cwe_ids", out _)
        ? "database_specific.cwe_ids"
        : null;
}
// Flattens cwe_notes into a single trimmed string: a bare string is trimmed,
// an array of strings is joined with " | "; anything else yields null.
private static string? NormalizeCweNotes(JsonElement notesElement)
{
    switch (notesElement.ValueKind)
    {
        case JsonValueKind.String:
            return Validation.TrimToNull(notesElement.GetString());

        case JsonValueKind.Array:
        {
            var parts = new List<string>();
            foreach (var entry in notesElement.EnumerateArray())
            {
                if (entry.ValueKind != JsonValueKind.String)
                {
                    continue;
                }

                var trimmed = Validation.TrimToNull(entry.GetString());
                if (!string.IsNullOrEmpty(trimmed))
                {
                    parts.Add(trimmed);
                }
            }

            return parts.Count == 0 ? null : string.Join(" | ", parts);
        }

        default:
            return null;
    }
}
private static IReadOnlyList<CvssMetric> BuildCvssMetrics(OsvVulnerabilityDto dto, DateTimeOffset recordedAt, out string? severity)
{
severity = null;

View File

@@ -14,8 +14,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Source.Common;
using StellaOps.Feedser.Source.Common.Fetch;
using StellaOps.Feedser.Source.Osv.Configuration;
@@ -39,35 +38,38 @@ public sealed class OsvConnector : IFeedConnector
private readonly IHttpClientFactory _httpClientFactory;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly OsvOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<OsvConnector> _logger;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly OsvOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<OsvConnector> _logger;
private readonly OsvDiagnostics _diagnostics;
public OsvConnector(
IHttpClientFactory httpClientFactory,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<OsvOptions> options,
TimeProvider? timeProvider,
ILogger<OsvConnector> logger)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<OsvOptions> options,
OsvDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<OsvConnector> logger)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public string SourceName => OsvConnectorPlugin.SourceName;
@@ -259,16 +261,31 @@ public sealed class OsvConnector : IFeedConnector
continue;
}
var ecosystem = document.Metadata is not null && document.Metadata.TryGetValue("osv.ecosystem", out var ecosystemValue)
? ecosystemValue
: "unknown";
var advisory = OsvMapper.Map(osvDto, document, dto, ecosystem);
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
var ecosystem = document.Metadata is not null && document.Metadata.TryGetValue("osv.ecosystem", out var ecosystemValue)
? ecosystemValue
: "unknown";
var advisory = OsvMapper.Map(osvDto, document, dto, ecosystem);
if (advisory.CvssMetrics.IsEmpty && !string.IsNullOrWhiteSpace(advisory.CanonicalMetricId))
{
var fallbackSeverity = string.IsNullOrWhiteSpace(advisory.Severity) ? "unknown" : advisory.Severity!;
_diagnostics.CanonicalMetricFallback(advisory.CanonicalMetricId!, fallbackSeverity, ecosystem);
if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug(
"OSV {OsvId} emitted canonical metric fallback {CanonicalMetricId} (severity {Severity}, ecosystem {Ecosystem})",
advisory.AdvisoryKey,
advisory.CanonicalMetricId,
fallbackSeverity,
ecosystem);
}
}
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
}
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);

View File

@@ -2,7 +2,8 @@ using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Source.Common.Http;
using StellaOps.Feedser.Source.Osv.Configuration;
using StellaOps.Feedser.Source.Osv.Configuration;
using StellaOps.Feedser.Source.Osv.Internal;
namespace StellaOps.Feedser.Source.Osv;
@@ -17,21 +18,22 @@ public static class OsvServiceCollectionExtensions
.Configure(configure)
.PostConfigure(static opts => opts.Validate());
services.AddSourceHttpClient(OsvOptions.HttpClientName, (sp, clientOptions) =>
{
var options = sp.GetRequiredService<IOptions<OsvOptions>>().Value;
clientOptions.BaseAddress = options.BaseUri;
clientOptions.Timeout = options.HttpTimeout;
services.AddSourceHttpClient(OsvOptions.HttpClientName, (sp, clientOptions) =>
{
var options = sp.GetRequiredService<IOptions<OsvOptions>>().Value;
clientOptions.BaseAddress = options.BaseUri;
clientOptions.Timeout = options.HttpTimeout;
clientOptions.UserAgent = "StellaOps.Feedser.OSV/1.0";
clientOptions.AllowedHosts.Clear();
clientOptions.AllowedHosts.Add(options.BaseUri.Host);
clientOptions.DefaultRequestHeaders["Accept"] = "application/zip";
});
services.AddTransient<OsvConnector>();
services.AddTransient<OsvFetchJob>();
services.AddTransient<OsvParseJob>();
services.AddTransient<OsvMapJob>();
return services;
clientOptions.DefaultRequestHeaders["Accept"] = "application/zip";
});
services.AddSingleton<OsvDiagnostics>();
services.AddTransient<OsvConnector>();
services.AddTransient<OsvFetchJob>();
services.AddTransient<OsvParseJob>();
services.AddTransient<OsvMapJob>();
return services;
}
}

View File

@@ -17,4 +17,4 @@
|FEEDCONN-OSV-04-003 Parity fixture refresh|QA, BE-Conn-OSV|Normalized versions rollout, GHSA parity tests|**DONE (2025-10-12)** Parity fixtures include normalizedVersions notes (`osv:<ecosystem>:<id>:<purl>`); regression math rerun via `dotnet test src/StellaOps.Feedser.Source.Osv.Tests` and docs flagged for workflow sync.|
|FEEDCONN-OSV-04-002 Conflict regression fixtures|BE-Conn-OSV, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** Added `conflict-osv.canonical.json` + regression asserting SemVer range + CVSS medium severity; dataset matches GHSA/NVD fixtures for merge tests. Validation: `dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj --filter OsvConflictFixtureTests`.|
|FEEDCONN-OSV-04-004 Description/CWE/metric parity rollout|BE-Conn-OSV|Models, Core|**DONE (2025-10-15)** OSV mapper writes advisory descriptions, `database_specific.cwe_ids` weaknesses, and canonical CVSS metric id. Parity fixtures (`osv-ghsa.*`, `osv-npm.snapshot.json`, `osv-pypi.snapshot.json`) refreshed and status communicated to Merge coordination.|
|FEEDCONN-OSV-04-005 Canonical metric fallbacks & CWE notes|BE-Conn-OSV|Models, Merge|TODO Add fallback logic and metrics for advisories lacking CVSS vectors, enrich CWE provenance notes, and document merge/export expectations; refresh parity fixtures accordingly.|
|FEEDCONN-OSV-04-005 Canonical metric fallbacks & CWE notes|BE-Conn-OSV|Models, Merge|**DONE (2025-10-16)** Add fallback logic and metrics for advisories lacking CVSS vectors, enrich CWE provenance notes, and document merge/export expectations; refresh parity fixtures accordingly.<br>2025-10-16: Mapper now emits `osv:severity/<level>` canonical ids for severity-only advisories, weakness provenance carries `database_specific.cwe_ids`, diagnostics expose `osv.map.canonical_metric_fallbacks`, parity fixtures regenerated, and ops notes added in `docs/ops/feedser-osv-operations.md`. Tests: `dotnet test src/StellaOps.Feedser.Source.Osv.Tests/StellaOps.Feedser.Source.Osv.Tests.csproj`.|

View File

@@ -20,3 +20,5 @@
|FEEDSTORAGE-DATA-02-002 Provenance decision persistence|BE-Storage|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-12)** Normalized documents carry decision reasons/source/timestamps with regression coverage verifying SemVer notes + provenance fallbacks.|
|FEEDSTORAGE-DATA-02-003 Normalized versions index creation|BE-Storage|Normalization, Mongo bootstrapper|**DONE (2025-10-12)** Bootstrapper seeds `normalizedVersions.*` indexes when SemVer style is enabled; docs/tests confirm index presence.|
|FEEDSTORAGE-DATA-04-001 Advisory payload parity (description/CWEs/canonical metric)|BE-Storage|Models, Core|DONE (2025-10-15) Mongo payloads round-trip new advisory fields; serializer/tests updated, no migration required beyond optional backfill.|
|FEEDSTORAGE-MONGO-08-001 Causal-consistent session plumbing|BE-Storage|Feedser Core DI|TODO Introduce scoped MongoDB session provider enabling causal consistency + majority read/write concerns in `AddMongoStorage`; flow optional `IClientSessionHandle` through job/advisory/source state/document stores; add integration test simulating primary election to prove read-your-write + monotonic reads.|
|FEEDSTORAGE-DATA-07-001 Advisory statement & conflict collections|Team Normalization & Storage Backbone|FEEDMERGE-ENGINE-07-001|TODO Create `advisory_statements` (immutable) and `advisory_conflicts` collections, define `asOf`/`vulnerabilityKey` indexes, and document migration/rollback steps for event-sourced merge.|

View File

@@ -0,0 +1,38 @@
using Amazon.S3;
using Amazon.S3.Model;
using Moq;
using StellaOps.Vexer.ArtifactStores.S3;
using StellaOps.Vexer.Export;
namespace StellaOps.Vexer.ArtifactStores.S3.Tests;
public sealed class S3ArtifactClientTests
{
    [Fact]
    public async Task ObjectExistsAsync_ReturnsTrue_WhenMetadataSucceeds()
    {
        // A successful metadata call means the object is present.
        var s3 = new Mock<IAmazonS3>();
        s3.Setup(x => x.GetObjectMetadataAsync("bucket", "key", default))
            .ReturnsAsync(new GetObjectMetadataResponse
            {
                HttpStatusCode = System.Net.HttpStatusCode.OK,
            });
        var sut = new S3ArtifactClient(
            s3.Object,
            Microsoft.Extensions.Logging.Abstractions.NullLogger<S3ArtifactClient>.Instance);

        var exists = await sut.ObjectExistsAsync("bucket", "key", default);

        Assert.True(exists);
    }

    [Fact]
    public async Task PutObjectAsync_MapsMetadata()
    {
        // Caller-supplied metadata must be copied onto the outgoing S3 request.
        var s3 = new Mock<IAmazonS3>();
        s3.Setup(x => x.PutObjectAsync(It.IsAny<PutObjectRequest>(), default))
            .ReturnsAsync(new PutObjectResponse());
        var sut = new S3ArtifactClient(
            s3.Object,
            Microsoft.Extensions.Logging.Abstractions.NullLogger<S3ArtifactClient>.Instance);
        using var payload = new MemoryStream(new byte[] { 1, 2, 3 });

        await sut.PutObjectAsync("bucket", "key", payload, new Dictionary<string, string> { ["a"] = "b" }, default);

        s3.Verify(x => x.PutObjectAsync(It.Is<PutObjectRequest>(r => r.Metadata["a"] == "b"), default), Times.Once);
    }
}

View File

@@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <!-- NOTE(review): only Moq is referenced here, yet the project contains [Fact]
         tests — presumably xunit + Microsoft.NET.Test.Sdk are injected through a
         shared Directory.Build.props/targets. Confirm; otherwise these tests will
         neither compile nor be discovered. -->
    <PackageReference Include="Moq" Version="4.20.70" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Vexer.ArtifactStores.S3\StellaOps.Vexer.ArtifactStores.S3.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,38 @@
using Amazon;
using Amazon.Runtime;
using Amazon.S3;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Export;
namespace StellaOps.Vexer.ArtifactStores.S3.Extensions;
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Registers the S3-backed VEX artifact client plus a singleton
    /// <see cref="IAmazonS3"/> built from <see cref="S3ArtifactClientOptions"/>.
    /// </summary>
    public static IServiceCollection AddVexS3ArtifactClient(this IServiceCollection services, Action<S3ArtifactClientOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);
        services.AddSingleton<IAmazonS3>(static provider =>
        {
            var options = provider.GetRequiredService<IOptions<S3ArtifactClientOptions>>().Value;
            var config = new AmazonS3Config
            {
                RegionEndpoint = RegionEndpoint.GetBySystemName(options.Region),
                ForcePathStyle = options.ForcePathStyle,
            };

            // An explicit endpoint (e.g. an S3-compatible store) takes precedence
            // over the region-derived URL.
            if (!string.IsNullOrWhiteSpace(options.ServiceUrl))
            {
                config.ServiceURL = options.ServiceUrl;
            }

            return new AmazonS3Client(config);
        });
        services.AddSingleton<IS3ArtifactClient, S3ArtifactClient>();
        return services;
    }
}

View File

@@ -0,0 +1,85 @@
using Amazon.S3;
using Amazon.S3.Model;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Export;
namespace StellaOps.Vexer.ArtifactStores.S3;
/// <summary>Options for the S3 artifact client registration.</summary>
public sealed class S3ArtifactClientOptions
{
    /// <summary>AWS region used when no explicit service URL is configured.</summary>
    public string Region { get; set; } = "us-east-1";

    /// <summary>Optional custom endpoint; null means the AWS default for the region.</summary>
    public string? ServiceUrl { get; set; }

    /// <summary>Path-style addressing (bucket in the path, not the host); on by default.</summary>
    public bool ForcePathStyle { get; set; } = true;
}
/// <summary>
/// <see cref="IS3ArtifactClient"/> implementation backed by the AWS S3 SDK.
/// </summary>
public sealed class S3ArtifactClient : IS3ArtifactClient
{
    private readonly IAmazonS3 _s3;
    private readonly ILogger<S3ArtifactClient> _logger;

    public S3ArtifactClient(IAmazonS3 s3, ILogger<S3ArtifactClient> logger)
    {
        _s3 = s3 ?? throw new ArgumentNullException(nameof(s3));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Checks object presence via a metadata request; a 404 maps to <c>false</c>.</summary>
    public async Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken cancellationToken)
    {
        try
        {
            var metadata = await _s3.GetObjectMetadataAsync(bucketName, key, cancellationToken).ConfigureAwait(false);
            return metadata.HttpStatusCode == System.Net.HttpStatusCode.OK;
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            return false;
        }
    }

    /// <summary>
    /// Uploads <paramref name="content"/> under <paramref name="key"/>, copying
    /// <paramref name="metadata"/> onto the request. The caller's stream is not closed.
    /// </summary>
    public async Task PutObjectAsync(string bucketName, string key, Stream content, IDictionary<string, string> metadata, CancellationToken cancellationToken)
    {
        var request = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = key,
            InputStream = content,
            AutoCloseStream = false, // the caller owns the stream's lifetime
        };

        foreach (var kvp in metadata)
        {
            request.Metadata[kvp.Key] = kvp.Value;
        }

        await _s3.PutObjectAsync(request, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("Uploaded object {Bucket}/{Key}", bucketName, key);
    }

    /// <summary>
    /// Downloads the object fully into memory and returns a rewound stream, or
    /// null when the object does not exist.
    /// </summary>
    public async Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken cancellationToken)
    {
        try
        {
            // Fix: dispose the GetObjectResponse (it owns the underlying HTTP
            // response stream) once its contents are buffered; previously it leaked.
            using var response = await _s3.GetObjectAsync(bucketName, key, cancellationToken).ConfigureAwait(false);
            var buffer = new MemoryStream();
            await response.ResponseStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
            buffer.Position = 0;
            return buffer;
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            _logger.LogDebug("Object {Bucket}/{Key} not found", bucketName, key);
            return null;
        }
    }

    /// <summary>Deletes the object; S3 treats deleting a missing key as success.</summary>
    public async Task DeleteObjectAsync(string bucketName, string key, CancellationToken cancellationToken)
    {
        await _s3.DeleteObjectAsync(bucketName, key, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("Deleted object {Bucket}/{Key}", bucketName, key);
    }
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Export\StellaOps.Vexer.Export.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <!-- NOTE(review): no xunit / Microsoft.NET.Test.Sdk package references although
       this project defines [Fact] tests — presumably supplied by a shared
       Directory.Build.props/targets; confirm so the tests compile and run. -->
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Vexer.Attestation\StellaOps.Vexer.Attestation.csproj" />
    <ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,81 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Attestation.Dsse;
using StellaOps.Vexer.Attestation.Signing;
using StellaOps.Vexer.Attestation.Transparency;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation.Tests;
public sealed class VexAttestationClientTests
{
    [Fact]
    public async Task SignAsync_ReturnsEnvelopeDigestAndDiagnostics()
    {
        var builder = new VexDsseBuilder(new FakeSigner(), NullLogger<VexDsseBuilder>.Instance);
        var client = new VexAttestationClient(
            builder,
            Options.Create(new VexAttestationClientOptions()),
            NullLogger<VexAttestationClient>.Instance);

        var response = await client.SignAsync(CreateRequest("exports/456"), CancellationToken.None);

        Assert.NotNull(response.Attestation);
        Assert.NotNull(response.Attestation.EnvelopeDigest);
        Assert.True(response.Diagnostics.ContainsKey("envelope"));
    }

    [Fact]
    public async Task SignAsync_SubmitsToTransparencyLog_WhenConfigured()
    {
        var builder = new VexDsseBuilder(new FakeSigner(), NullLogger<VexDsseBuilder>.Instance);
        var transparency = new FakeTransparencyLogClient();
        var client = new VexAttestationClient(
            builder,
            Options.Create(new VexAttestationClientOptions()),
            NullLogger<VexAttestationClient>.Instance,
            transparencyLogClient: transparency);

        var response = await client.SignAsync(CreateRequest("exports/789"), CancellationToken.None);

        Assert.NotNull(response.Attestation.Rekor);
        Assert.True(response.Diagnostics.ContainsKey("rekorLocation"));
        Assert.True(transparency.SubmitCalled);
    }

    // Builds a minimal attestation request; only the export id varies per test.
    private static VexAttestationRequest CreateRequest(string exportId)
        => new(
            ExportId: exportId,
            QuerySignature: new VexQuerySignature("filters"),
            Artifact: new VexContentAddress("sha256", "deadbeef"),
            Format: VexExportFormat.Json,
            CreatedAt: DateTimeOffset.UtcNow,
            SourceProviders: ImmutableArray.Create("vendor"),
            Metadata: ImmutableDictionary<string, string>.Empty);

    private sealed class FakeSigner : IVexSigner
    {
        public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexSignedPayload("signature", "key"));
    }

    private sealed class FakeTransparencyLogClient : ITransparencyLogClient
    {
        public bool SubmitCalled { get; private set; }

        public ValueTask<TransparencyLogEntry> SubmitAsync(DsseEnvelope envelope, CancellationToken cancellationToken)
        {
            SubmitCalled = true;
            return ValueTask.FromResult(new TransparencyLogEntry(Guid.NewGuid().ToString(), "https://rekor.example/entries/123", "23", null));
        }

        public ValueTask<bool> VerifyAsync(string entryLocation, CancellationToken cancellationToken)
            => ValueTask.FromResult(true);
    }
}

View File

@@ -0,0 +1,52 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Vexer.Attestation.Dsse;
using StellaOps.Vexer.Attestation.Models;
using StellaOps.Vexer.Attestation.Signing;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation.Tests;
public sealed class VexDsseBuilderTests
{
    [Fact]
    public async Task CreateEnvelopeAsync_ProducesDeterministicPayload()
    {
        var builder = new VexDsseBuilder(
            new FakeSigner("signature-value", "key-1"),
            NullLogger<VexDsseBuilder>.Instance);
        var request = new VexAttestationRequest(
            ExportId: "exports/123",
            QuerySignature: new VexQuerySignature("filters"),
            Artifact: new VexContentAddress("sha256", "deadbeef"),
            Format: VexExportFormat.Json,
            CreatedAt: DateTimeOffset.UtcNow,
            SourceProviders: ImmutableArray.Create("vendor"),
            Metadata: ImmutableDictionary<string, string>.Empty);

        var envelope = await builder.CreateEnvelopeAsync(request, request.Metadata, CancellationToken.None);

        // The envelope carries the in-toto payload type and exactly one signature
        // sourced from the fake signer.
        Assert.Equal("application/vnd.in-toto+json", envelope.PayloadType);
        var signature = Assert.Single(envelope.Signatures);
        Assert.Equal("signature-value", signature.Signature);
        Assert.Equal("key-1", signature.KeyId);

        var digest = VexDsseBuilder.ComputeEnvelopeDigest(envelope);
        Assert.StartsWith("sha256:", digest);
    }

    private sealed class FakeSigner : IVexSigner
    {
        private readonly string _sig;
        private readonly string _key;

        public FakeSigner(string sig, string key)
        {
            _sig = sig;
            _key = key;
        }

        public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexSignedPayload(_sig, _key));
    }
}

View File

@@ -0,0 +1,13 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Vexer.Attestation.Dsse;
/// <summary>
/// DSSE envelope: the base64-encoded payload, its payload type, and the
/// signatures over the payload. Property names follow the DSSE wire format.
/// </summary>
public sealed record DsseEnvelope(
    [property: JsonPropertyName("payload")] string Payload,
    [property: JsonPropertyName("payloadType")] string PayloadType,
    [property: JsonPropertyName("signatures")] IReadOnlyList<DsseSignature> Signatures);

/// <summary>A single DSSE signature ("sig") with its optional key identifier ("keyid").</summary>
public sealed record DsseSignature(
    [property: JsonPropertyName("sig")] string Signature,
    [property: JsonPropertyName("keyid")] string? KeyId);

View File

@@ -0,0 +1,83 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Vexer.Attestation.Models;
using StellaOps.Vexer.Attestation.Signing;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation.Dsse;
/// <summary>
/// Builds DSSE envelopes around in-toto statements for VEX export attestations.
/// </summary>
public sealed class VexDsseBuilder
{
    private const string PayloadType = "application/vnd.in-toto+json";

    // Cached: building JsonSerializerOptions per call re-creates serializer
    // metadata every time (CA1869). Settings are unchanged from the original
    // per-call instance, so digests are identical.
    private static readonly JsonSerializerOptions DigestSerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    private readonly IVexSigner _signer;
    private readonly ILogger<VexDsseBuilder> _logger;
    private readonly JsonSerializerOptions _serializerOptions;

    public VexDsseBuilder(IVexSigner signer, ILogger<VexDsseBuilder> logger)
    {
        _signer = signer ?? throw new ArgumentNullException(nameof(signer));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _serializerOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.Never,
            WriteIndented = false,
        };
        _serializerOptions.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase));
    }

    /// <summary>
    /// Serializes the in-toto statement for <paramref name="request"/>, signs the
    /// payload bytes, and wraps both in a single-signature DSSE envelope.
    /// </summary>
    /// <param name="request">Attestation request describing the export artifact.</param>
    /// <param name="metadata">Optional metadata folded into the predicate; may be null.</param>
    public async ValueTask<DsseEnvelope> CreateEnvelopeAsync(
        VexAttestationRequest request,
        IReadOnlyDictionary<string, string>? metadata,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var predicate = VexAttestationPredicate.FromRequest(request, metadata);
        // Subject digest key is the lower-cased algorithm name, per in-toto convention.
        var subject = new VexInTotoSubject(
            request.ExportId,
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                { request.Artifact.Algorithm.ToLowerInvariant(), request.Artifact.Digest }
            });
        var statement = new VexInTotoStatement(
            VexInTotoStatement.InTotoType,
            "https://stella-ops.org/attestations/vex-export",
            new[] { subject },
            predicate);

        var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, _serializerOptions);
        var signatureResult = await _signer.SignAsync(payloadBytes, cancellationToken).ConfigureAwait(false);
        var envelope = new DsseEnvelope(
            Convert.ToBase64String(payloadBytes),
            PayloadType,
            new[] { new DsseSignature(signatureResult.Signature, signatureResult.KeyId) });

        _logger.LogDebug("DSSE envelope created for export {ExportId}", request.ExportId);
        return envelope;
    }

    /// <summary>
    /// Computes "sha256:&lt;lowercase-hex&gt;" over the camelCase, null-omitting
    /// JSON serialization of the envelope.
    /// </summary>
    public static string ComputeEnvelopeDigest(DsseEnvelope envelope)
    {
        ArgumentNullException.ThrowIfNull(envelope);

        // SerializeToUtf8Bytes yields the same bytes as Serialize + UTF8.GetBytes
        // without the intermediate string allocation.
        var bytes = JsonSerializer.SerializeToUtf8Bytes(envelope, DigestSerializerOptions);
        var hash = SHA256.HashData(bytes);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}

View File

@@ -0,0 +1,24 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Vexer.Attestation.Dsse;
using StellaOps.Vexer.Attestation.Transparency;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation.Extensions;
public static class VexAttestationServiceCollectionExtensions
{
    /// <summary>
    /// Registers the DSSE builder and attestation client used to sign VEX exports.
    /// </summary>
    public static IServiceCollection AddVexAttestation(this IServiceCollection services)
    {
        // Fix: guard services like AddVexRekorClient guards configure — a null
        // receiver previously surfaced as an NRE inside the framework.
        ArgumentNullException.ThrowIfNull(services);

        services.AddSingleton<VexDsseBuilder>();
        services.AddSingleton<IVexAttestationClient, VexAttestationClient>();
        return services;
    }

    /// <summary>
    /// Registers an HTTP-backed Rekor transparency-log client configured via
    /// <paramref name="configure"/>.
    /// </summary>
    public static IServiceCollection AddVexRekorClient(this IServiceCollection services, Action<RekorHttpClientOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);
        services.AddHttpClient<ITransparencyLogClient, RekorHttpClient>();
        return services;
    }
}

View File

@@ -0,0 +1,44 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation.Models;
/// <summary>
/// In-toto predicate payload describing a single Vexer export attestation.
/// </summary>
public sealed record VexAttestationPredicate(
    string ExportId,
    string QuerySignature,
    string ArtifactAlgorithm,
    string ArtifactDigest,
    VexExportFormat Format,
    DateTimeOffset CreatedAt,
    IReadOnlyList<string> SourceProviders,
    IReadOnlyDictionary<string, string> Metadata)
{
    /// <summary>
    /// Builds a predicate from an attestation request; optional metadata is copied into
    /// an ordinal-keyed immutable dictionary (empty when none is supplied).
    /// </summary>
    public static VexAttestationPredicate FromRequest(
        VexAttestationRequest request,
        IReadOnlyDictionary<string, string>? metadata = null)
    {
        var predicateMetadata = metadata?.ToImmutableDictionary(StringComparer.Ordinal)
            ?? ImmutableDictionary<string, string>.Empty;

        return new VexAttestationPredicate(
            request.ExportId,
            request.QuerySignature.Value,
            request.Artifact.Algorithm,
            request.Artifact.Digest,
            request.Format,
            request.CreatedAt,
            request.SourceProviders,
            predicateMetadata);
    }
}
/// <summary>
/// In-toto statement subject: a name plus one digest entry per algorithm.
/// </summary>
public sealed record VexInTotoSubject(
    string Name,
    IReadOnlyDictionary<string, string> Digest);
/// <summary>
/// In-toto statement wrapping a Vexer attestation predicate; <c>Type</c> is serialized
/// as the spec-mandated <c>_type</c> property.
/// </summary>
public sealed record VexInTotoStatement(
    [property: JsonPropertyName("_type")] string Type,
    string PredicateType,
    IReadOnlyList<VexInTotoSubject> Subject,
    VexAttestationPredicate Predicate)
{
    /// <summary>Statement type URI from the in-toto v0.1 spec; a true compile-time constant.</summary>
    public const string InTotoType = "https://in-toto.io/Statement/v0.1";
}

View File

@@ -0,0 +1,12 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Vexer.Attestation.Signing;
/// <summary>Result of a signing operation: the signature value and the optional identifier of the signing key.</summary>
public sealed record VexSignedPayload(string Signature, string? KeyId);
/// <summary>
/// Abstraction over the signing backend used to produce DSSE signatures.
/// </summary>
public interface IVexSigner
{
    // Signs the raw payload bytes; implementations choose algorithm and key handling.
    ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="8.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -2,6 +2,6 @@ If you are working on this file you need to read docs/ARCHITECTURE_VEXER.md and
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|VEXER-ATTEST-01-001 In-toto predicate & DSSE builder|Team Vexer Attestation|VEXER-CORE-01-001|TODO Implement export attestation predicates and DSSE envelope builder with deterministic hashing and signer abstraction.|
|VEXER-ATTEST-01-002 Rekor v2 client integration|Team Vexer Attestation|VEXER-ATTEST-01-001|TODO Provide `ITransparencyLogClient` with submit/verify operations, retries, and offline queue fallback matching architecture guidance.|
|VEXER-ATTEST-01-001 In-toto predicate & DSSE builder|Team Vexer Attestation|VEXER-CORE-01-001|**DONE (2025-10-16)** Added deterministic in-toto predicate/statement models, DSSE envelope builder wired to signer abstraction, and attestation client producing metadata + diagnostics.|
|VEXER-ATTEST-01-002 Rekor v2 client integration|Team Vexer Attestation|VEXER-ATTEST-01-001|**DONE (2025-10-16)** Implemented Rekor HTTP client with retry/backoff, transparency log abstraction, DI helpers, and attestation client integration capturing Rekor metadata + diagnostics.|
|VEXER-ATTEST-01-003 Verification suite & observability|Team Vexer Attestation|VEXER-ATTEST-01-002|TODO Add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression tests.|

View File

@@ -0,0 +1,14 @@
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Vexer.Attestation.Dsse;
namespace StellaOps.Vexer.Attestation.Transparency;
/// <summary>Metadata returned by a transparency-log submission (entry id, location, optional log index and inclusion proof).</summary>
public sealed record TransparencyLogEntry(string Id, string Location, string? LogIndex, string? InclusionProofUrl);
/// <summary>
/// Client for a transparency log (e.g. Rekor): submit DSSE envelopes and verify recorded entries.
/// </summary>
public interface ITransparencyLogClient
{
    // Submits the envelope and returns the recorded entry's metadata.
    ValueTask<TransparencyLogEntry> SubmitAsync(DsseEnvelope envelope, CancellationToken cancellationToken);
    // Returns true when the entry at the given location is retrievable.
    ValueTask<bool> VerifyAsync(string entryLocation, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,91 @@
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Attestation.Dsse;
namespace StellaOps.Vexer.Attestation.Transparency;
/// <summary>
/// HTTP implementation of <see cref="ITransparencyLogClient"/> targeting a Rekor server,
/// with simple fixed-delay retries on submission failures.
/// </summary>
internal sealed class RekorHttpClient : ITransparencyLogClient
{
    private readonly HttpClient _httpClient;
    private readonly RekorHttpClientOptions _options;
    private readonly ILogger<RekorHttpClient> _logger;

    public RekorHttpClient(HttpClient httpClient, IOptions<RekorHttpClientOptions> options, ILogger<RekorHttpClient> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        ArgumentNullException.ThrowIfNull(options);
        _options = options.Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        if (!string.IsNullOrWhiteSpace(_options.BaseAddress))
        {
            _httpClient.BaseAddress = new Uri(_options.BaseAddress, UriKind.Absolute);
        }

        if (!string.IsNullOrWhiteSpace(_options.ApiKey))
        {
            // NOTE(review): the raw key is sent as the Authorization header value with no
            // scheme (e.g. "Bearer") — confirm this matches the target Rekor deployment.
            _httpClient.DefaultRequestHeaders.Add("Authorization", _options.ApiKey);
        }
    }

    /// <summary>
    /// Serializes the envelope and POSTs it to the Rekor entries endpoint, retrying up to
    /// <see cref="RekorHttpClientOptions.RetryCount"/> times with a fixed delay.
    /// </summary>
    /// <exception cref="HttpRequestException">All attempts failed.</exception>
    public async ValueTask<TransparencyLogEntry> SubmitAsync(DsseEnvelope envelope, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(envelope);

        var payload = JsonSerializer.Serialize(envelope);
        HttpResponseMessage? response = null;
        try
        {
            for (var attempt = 0; attempt < _options.RetryCount; attempt++)
            {
                // Fresh content per attempt: HttpContent instances are single-use and must not
                // be re-sent after a prior request has consumed them.
                using var content = new StringContent(payload);
                content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");

                // Dispose the previous failed response before retrying to avoid leaking
                // connections/buffers across attempts.
                response?.Dispose();
                response = await _httpClient.PostAsync("/api/v2/log/entries", content, cancellationToken).ConfigureAwait(false);
                if (response.IsSuccessStatusCode)
                {
                    break;
                }

                _logger.LogWarning("Rekor submission failed with status {Status}; attempt {Attempt}", response.StatusCode, attempt + 1);
                if (attempt + 1 < _options.RetryCount)
                {
                    await Task.Delay(_options.RetryDelay, cancellationToken).ConfigureAwait(false);
                }
            }

            if (response is null || !response.IsSuccessStatusCode)
            {
                throw new HttpRequestException($"Failed to submit attestation to Rekor ({response?.StatusCode}).");
            }

            var entryLocation = response.Headers.Location?.ToString() ?? string.Empty;
            var body = await response.Content.ReadFromJsonAsync<JsonElement>(cancellationToken: cancellationToken).ConfigureAwait(false);
            var entry = ParseEntryLocation(entryLocation, body);
            _logger.LogInformation("Rekor entry recorded at {Location}", entry.Location);
            return entry;
        }
        finally
        {
            response?.Dispose();
        }
    }

    /// <summary>
    /// Returns true when the entry at <paramref name="entryLocation"/> responds with a
    /// success status; false for blank locations or failed lookups.
    /// </summary>
    public async ValueTask<bool> VerifyAsync(string entryLocation, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(entryLocation))
        {
            return false;
        }

        using var response = await _httpClient.GetAsync(entryLocation, cancellationToken).ConfigureAwait(false);
        return response.IsSuccessStatusCode;
    }

    // Extracts entry metadata from the response body; falls back to a random id when the
    // server supplied none.
    private static TransparencyLogEntry ParseEntryLocation(string location, JsonElement body)
    {
        var id = body.TryGetProperty("uuid", out var uuid) ? uuid.GetString() ?? string.Empty : Guid.NewGuid().ToString();
        var logIndex = body.TryGetProperty("logIndex", out var logIndexElement) ? logIndexElement.GetString() : null;
        string? inclusionProof = null;
        if (body.TryGetProperty("verification", out var verification) && verification.TryGetProperty("inclusionProof", out var inclusion))
        {
            // NOTE(review): this captures the inclusion proof's "logIndex" raw JSON, but callers
            // consume TransparencyLogEntry.InclusionProofUrl as a URI — confirm the intended field.
            inclusionProof = inclusion.GetProperty("logIndex").GetRawText();
        }

        return new TransparencyLogEntry(id, location, logIndex, inclusionProof);
    }
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Vexer.Attestation.Transparency;
/// <summary>
/// Configuration for the Rekor transparency-log HTTP client.
/// </summary>
public sealed class RekorHttpClientOptions
{
    /// <summary>Absolute base URL of the Rekor server.</summary>
    public string BaseAddress { get; set; } = "https://rekor.sigstore.dev";

    /// <summary>Optional raw Authorization header value; no header is sent when null or blank.</summary>
    public string? ApiKey { get; set; }

    /// <summary>Maximum number of submission attempts before giving up.</summary>
    public int RetryCount { get; set; } = 3;

    /// <summary>Delay between submission attempts.</summary>
    public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(2);
}

View File

@@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Attestation.Dsse;
using StellaOps.Vexer.Attestation.Models;
using StellaOps.Vexer.Attestation.Signing;
using StellaOps.Vexer.Attestation.Transparency;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Attestation;
/// <summary>
/// Options for <see cref="VexAttestationClient"/>.
/// </summary>
public sealed class VexAttestationClientOptions
{
    // Default metadata merged beneath each request's metadata (request entries win on key collisions).
    public IReadOnlyDictionary<string, string> DefaultMetadata { get; set; } = ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Produces signed DSSE attestations for Vexer exports and optionally records them in a
/// transparency log.
/// </summary>
public sealed class VexAttestationClient : IVexAttestationClient
{
    // Single definition of the predicate type URI (previously duplicated in two literals).
    private const string PredicateType = "https://stella-ops.org/attestations/vex-export";

    private readonly VexDsseBuilder _builder;
    private readonly ILogger<VexAttestationClient> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly IReadOnlyDictionary<string, string> _defaultMetadata;
    private readonly ITransparencyLogClient? _transparencyLogClient;

    public VexAttestationClient(
        VexDsseBuilder builder,
        IOptions<VexAttestationClientOptions> options,
        ILogger<VexAttestationClient> logger,
        TimeProvider? timeProvider = null,
        ITransparencyLogClient? transparencyLogClient = null)
    {
        _builder = builder ?? throw new ArgumentNullException(nameof(builder));
        ArgumentNullException.ThrowIfNull(options);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _defaultMetadata = options.Value.DefaultMetadata;
        _transparencyLogClient = transparencyLogClient;
    }

    /// <summary>
    /// Builds and signs a DSSE envelope for the request, optionally submits it to the
    /// transparency log, and returns attestation metadata plus diagnostics (serialized
    /// envelope, predicate type, and Rekor location when available).
    /// </summary>
    public async ValueTask<VexAttestationResponse> SignAsync(VexAttestationRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var mergedMetadata = MergeMetadata(request.Metadata, _defaultMetadata);
        var envelope = await _builder.CreateEnvelopeAsync(request, mergedMetadata, cancellationToken).ConfigureAwait(false);
        var envelopeDigest = VexDsseBuilder.ComputeEnvelopeDigest(envelope);
        var signedAt = _timeProvider.GetUtcNow();

        var diagnosticsBuilder = ImmutableDictionary<string, string>.Empty
            .Add("envelope", JsonSerializer.Serialize(envelope))
            .Add("predicateType", PredicateType);

        VexRekorReference? rekorReference = null;
        if (_transparencyLogClient is not null)
        {
            try
            {
                var entry = await _transparencyLogClient.SubmitAsync(envelope, cancellationToken).ConfigureAwait(false);
                // NOTE(review): InclusionProofUrl is parsed as a URI here; the transparency-log
                // client must supply a URL (not a raw log index) for this to succeed.
                rekorReference = new VexRekorReference("0.2", entry.Location, entry.LogIndex, entry.InclusionProofUrl is not null ? new Uri(entry.InclusionProofUrl) : null);
                diagnosticsBuilder = diagnosticsBuilder.Add("rekorLocation", entry.Location);
            }
            catch (Exception ex)
            {
                // Submission failures are fatal for signing: log and propagate.
                _logger.LogError(ex, "Failed to submit attestation to Rekor transparency log");
                throw;
            }
        }

        var metadata = new VexAttestationMetadata(
            predicateType: PredicateType,
            rekor: rekorReference,
            envelopeDigest: envelopeDigest,
            signedAt: signedAt);

        _logger.LogInformation("Generated DSSE envelope for export {ExportId} ({Digest})", request.ExportId, envelopeDigest);
        return new VexAttestationResponse(metadata, diagnosticsBuilder);
    }

    /// <summary>
    /// Not yet implemented: always reports the attestation as valid.
    /// </summary>
    public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationRequest request, CancellationToken cancellationToken)
    {
        // Placeholder until verification flow is implemented in VEXER-ATTEST-01-003.
        return ValueTask.FromResult(new VexAttestationVerification(true, ImmutableDictionary<string, string>.Empty));
    }

    // Merges request metadata over configured defaults; request entries win on key collisions.
    private static IReadOnlyDictionary<string, string> MergeMetadata(
        IReadOnlyDictionary<string, string> requestMetadata,
        IReadOnlyDictionary<string, string> defaults)
    {
        if (defaults.Count == 0)
        {
            return requestMetadata;
        }

        var merged = new Dictionary<string, string>(defaults, StringComparer.Ordinal);
        foreach (var kvp in requestMetadata)
        {
            merged[kvp.Key] = kvp.Value;
        }

        return merged.ToImmutableDictionary(StringComparer.Ordinal);
    }
}

View File

@@ -8,5 +8,6 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />
<ProjectReference Include="..\StellaOps.Vexer.Policy\StellaOps.Vexer.Policy.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,83 @@
using System;
using System.IO;
using System.Text;
using StellaOps.Vexer.Policy;
namespace StellaOps.Vexer.Core.Tests;
/// <summary>
/// Tests for <c>VexPolicyBinder</c>: binding policy documents from JSON/YAML text and streams.
/// </summary>
public sealed class VexPolicyBinderTests
{
    // Valid JSON policy fixture with custom weights and a provider override.
    private const string JsonPolicy = """
        {
          "version": "custom/v2",
          "weights": {
            "vendor": 0.95,
            "distro": 0.85
          },
          "providerOverrides": {
            "provider.example": 0.5
          }
        }
        """;

    // Valid YAML policy fixture with weights and two provider overrides.
    private const string YamlPolicy = """
        version: custom/v3
        weights:
          vendor: 0.8
          distro: 0.7
          platform: 0.6
        providerOverrides:
          provider-a: 0.4
          provider-b: 0.3
        """;

    [Fact]
    public void Bind_Json_ReturnsNormalizedOptions()
    {
        var result = VexPolicyBinder.Bind(JsonPolicy, VexPolicyDocumentFormat.Json);

        // Both raw and normalized options carry the document's version; no issues expected.
        Assert.True(result.Success);
        Assert.NotNull(result.Options);
        Assert.NotNull(result.NormalizedOptions);
        Assert.Equal("custom/v2", result.Options!.Version);
        Assert.Equal("custom/v2", result.NormalizedOptions!.Version);
        Assert.Empty(result.Issues);
    }

    [Fact]
    public void Bind_Yaml_ReturnsOverridesAndWarningsSorted()
    {
        var result = VexPolicyBinder.Bind(YamlPolicy, VexPolicyDocumentFormat.Yaml);

        // Overrides from the YAML document survive normalization with their values intact.
        Assert.True(result.Success);
        Assert.NotNull(result.NormalizedOptions);
        var overrides = result.NormalizedOptions!.ProviderOverrides;
        Assert.Equal(2, overrides.Count);
        Assert.Equal(0.4, overrides["provider-a"]);
        Assert.Equal(0.3, overrides["provider-b"]);
        Assert.Empty(result.Issues);
    }

    [Fact]
    public void Bind_InvalidJson_ReturnsError()
    {
        // Malformed document: unterminated object and a non-numeric weight.
        const string invalidJson = "{ \"weights\": { \"vendor\": \"not-a-number\" }";

        var result = VexPolicyBinder.Bind(invalidJson, VexPolicyDocumentFormat.Json);

        // A single parse error with a stable, namespaced code is surfaced.
        Assert.False(result.Success);
        var issue = Assert.Single(result.Issues);
        Assert.Equal(VexPolicyIssueSeverity.Error, issue.Severity);
        Assert.StartsWith("policy.parse.json", issue.Code, StringComparison.Ordinal);
    }

    [Fact]
    public void Bind_Stream_SupportsEncoding()
    {
        // UTF-8 stream input binds the same as the string overload.
        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(JsonPolicy));
        var result = VexPolicyBinder.Bind(stream, VexPolicyDocumentFormat.Json);
        Assert.True(result.Success);
        Assert.NotNull(result.Options);
    }
}

View File

@@ -0,0 +1,169 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Policy;
using System.Diagnostics.Metrics;
namespace StellaOps.Vexer.Core.Tests;
/// <summary>
/// Tests for <c>VexPolicyDiagnostics</c> reports and the policy provider's
/// revision/digest telemetry. Sealed for consistency with the other test classes.
/// </summary>
public sealed class VexPolicyDiagnosticsTests
{
    [Fact]
    public void GetDiagnostics_ReportsCountsRecommendationsAndOverrides()
    {
        var overrides = new[]
        {
            new KeyValuePair<string, double>("provider-a", 0.8),
            new KeyValuePair<string, double>("provider-b", 0.6),
        };

        // Snapshot carrying one error + one warning plus two active overrides.
        var snapshot = new VexPolicySnapshot(
            "custom/v1",
            new VexConsensusPolicyOptions(
                version: "custom/v1",
                providerOverrides: overrides),
            new BaselineVexConsensusPolicy(),
            ImmutableArray.Create(
                new VexPolicyIssue("sample.error", "Blocking issue.", VexPolicyIssueSeverity.Error),
                new VexPolicyIssue("sample.warning", "Non-blocking issue.", VexPolicyIssueSeverity.Warning)),
            "rev-test",
            "ABCDEF");

        var fakeProvider = new FakePolicyProvider(snapshot);
        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 16, 17, 0, 0, TimeSpan.Zero));
        var diagnostics = new VexPolicyDiagnostics(fakeProvider, fakeTime);

        var report = diagnostics.GetDiagnostics();

        Assert.Equal("custom/v1", report.Version);
        Assert.Equal("rev-test", report.RevisionId);
        Assert.Equal("ABCDEF", report.Digest);
        Assert.Equal(1, report.ErrorCount);
        Assert.Equal(1, report.WarningCount);
        Assert.Equal(fakeTime.GetUtcNow(), report.GeneratedAt);
        // Issues are emitted in deterministic order: errors before warnings.
        Assert.Collection(report.Issues,
            issue => Assert.Equal("sample.error", issue.Code),
            issue => Assert.Equal("sample.warning", issue.Code));
        Assert.Equal(new[] { "provider-a", "provider-b" }, report.ActiveOverrides.Keys.OrderBy(static key => key, StringComparer.Ordinal));
        Assert.Contains(report.Recommendations, message => message.Contains("Resolve policy errors", StringComparison.OrdinalIgnoreCase));
        Assert.Contains(report.Recommendations, message => message.Contains("provider-a", StringComparison.OrdinalIgnoreCase));
        Assert.Contains(report.Recommendations, message => message.Contains("docs/ARCHITECTURE_VEXER.md", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void GetDiagnostics_WhenNoIssues_StillReturnsDefaultRecommendation()
    {
        var fakeProvider = new FakePolicyProvider(VexPolicySnapshot.Default);
        var fakeTime = new FakeTimeProvider(new DateTimeOffset(2025, 10, 16, 17, 0, 0, TimeSpan.Zero));
        var diagnostics = new VexPolicyDiagnostics(fakeProvider, fakeTime);

        var report = diagnostics.GetDiagnostics();

        // A clean snapshot still yields exactly one baseline recommendation.
        Assert.Equal(0, report.ErrorCount);
        Assert.Equal(0, report.WarningCount);
        Assert.Empty(report.ActiveOverrides);
        Assert.Single(report.Recommendations);
    }

    [Fact]
    public void PolicyProvider_ComputesRevisionAndDigest_AndEmitsTelemetry()
    {
        // Listen for the provider's reload counter to confirm telemetry is emitted
        // with the current revision tag.
        using var listener = new MeterListener();
        var reloadMeasurements = 0;
        string? lastRevision = null;
        listener.InstrumentPublished += (instrument, _) =>
        {
            if (instrument.Meter.Name == "StellaOps.Vexer.Policy" &&
                instrument.Name == "vex.policy.reloads")
            {
                listener.EnableMeasurementEvents(instrument);
            }
        };
        listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
        {
            reloadMeasurements++;
            foreach (var tag in tags)
            {
                if (tag.Key is "revision" && tag.Value is string revision)
                {
                    lastRevision = revision;
                    break;
                }
            }
        });
        listener.Start();

        var optionsMonitor = new MutableOptionsMonitor<VexPolicyOptions>(new VexPolicyOptions());
        var provider = new VexPolicyProvider(optionsMonitor, NullLogger<VexPolicyProvider>.Instance);

        // First snapshot: revision rev-1 with a stable digest; repeated reads do not advance it.
        var snapshot1 = provider.GetSnapshot();
        Assert.Equal("rev-1", snapshot1.RevisionId);
        Assert.False(string.IsNullOrWhiteSpace(snapshot1.Digest));

        var snapshot2 = provider.GetSnapshot();
        Assert.Equal("rev-1", snapshot2.RevisionId);
        Assert.Equal(snapshot1.Digest, snapshot2.Digest);

        // Changing options bumps the revision and produces a new digest.
        optionsMonitor.Update(new VexPolicyOptions
        {
            ProviderOverrides = new Dictionary<string, double>
            {
                ["provider-a"] = 0.4
            }
        });

        var snapshot3 = provider.GetSnapshot();
        Assert.Equal("rev-2", snapshot3.RevisionId);
        Assert.NotEqual(snapshot1.Digest, snapshot3.Digest);

        listener.Dispose();
        Assert.True(reloadMeasurements >= 2);
        Assert.Equal("rev-2", lastRevision);
    }

    // Minimal provider stub returning a fixed snapshot.
    private sealed class FakePolicyProvider : IVexPolicyProvider
    {
        private readonly VexPolicySnapshot _snapshot;

        public FakePolicyProvider(VexPolicySnapshot snapshot)
        {
            _snapshot = snapshot;
        }

        public VexPolicySnapshot GetSnapshot() => _snapshot;
    }

    // Options monitor whose value can be swapped mid-test; change notifications are no-ops.
    private sealed class MutableOptionsMonitor<T> : IOptionsMonitor<T>
    {
        private T _value;

        public MutableOptionsMonitor(T value)
        {
            _value = value;
        }

        public T CurrentValue => _value;

        public T Get(string? name) => _value;

        public void Update(T newValue) => _value = newValue;

        public IDisposable OnChange(Action<T, string?> listener) => NullDisposable.Instance;

        private sealed class NullDisposable : IDisposable
        {
            public static readonly NullDisposable Instance = new();

            public void Dispose()
            {
            }
        }
    }
}

View File

@@ -5,3 +5,5 @@ If you are working on this file you need to read docs/ARCHITECTURE_VEXER.md and
|VEXER-CORE-01-001 Canonical VEX domain records|Team Vexer Core & Policy|docs/ARCHITECTURE_VEXER.md|DONE (2025-10-15) Introduced `VexClaim`, `VexConsensus`, provider metadata, export manifest records, and deterministic JSON serialization with tests covering canonical ordering and query signatures.|
|VEXER-CORE-01-002 Trust-weighted consensus resolver|Team Vexer Core & Policy|VEXER-CORE-01-001|DONE (2025-10-15) Added consensus resolver, baseline policy (tier weights + justification gate), telemetry output, and tests covering acceptance, conflict ties, and determinism.|
|VEXER-CORE-01-003 Shared contracts & query signatures|Team Vexer Core & Policy|VEXER-CORE-01-001|DONE (2025-10-15) Published connector/normalizer/exporter/attestation abstractions and expanded deterministic `VexQuerySignature`/hash utilities with test coverage.|
|VEXER-CORE-02-001 Context signal schema prep|Team Vexer Core & Policy|VEXER-POLICY-02-001|TODO Extend `VexClaim`/`VexConsensus` with optional severity/KEV/EPSS payloads, update canonical serializer/hashes, and coordinate migration notes with Storage.|
|VEXER-CORE-02-002 Deterministic risk scoring engine|Team Vexer Core & Policy|VEXER-CORE-02-001, VEXER-POLICY-02-001|BACKLOG Introduce the scoring calculator invoked by consensus, persist score envelopes with audit trails, and add regression fixtures covering gate/boost behaviour before enabling exports.|

View File

@@ -13,10 +13,12 @@ public interface IVexAttestationClient
}
/// <summary>
/// Input to attestation signing: export identity, deterministic query signature,
/// content-addressed artifact, export format, creation time, contributing providers,
/// and free-form metadata.
/// </summary>
public sealed record VexAttestationRequest(
    string ExportId,
    VexQuerySignature QuerySignature,
    VexContentAddress Artifact,
    VexExportFormat Format,
    DateTimeOffset CreatedAt,
    ImmutableArray<string> SourceProviders,
    ImmutableDictionary<string, string> Metadata);
public sealed record VexAttestationResponse(

View File

@@ -0,0 +1,56 @@
using System;
namespace StellaOps.Vexer.Core;
/// <summary>
/// Cached export artifact metadata allowing reuse of previously generated manifests.
/// </summary>
public sealed class VexCacheEntry
{
    /// <summary>
    /// Creates a cache entry.
    /// </summary>
    /// <param name="querySignature">Deterministic signature of the originating query.</param>
    /// <param name="format">Export format of the cached artifact.</param>
    /// <param name="artifact">Content address of the stored artifact.</param>
    /// <param name="createdAt">Creation timestamp of the entry.</param>
    /// <param name="sizeBytes">Artifact size in bytes; must be non-negative.</param>
    /// <param name="manifestId">Optional manifest identifier; trimmed, null when blank.</param>
    /// <param name="gridFsObjectId">Optional GridFS object id; trimmed, null when blank.</param>
    /// <param name="expiresAt">Optional expiry; must not precede <paramref name="createdAt"/>.</param>
    public VexCacheEntry(
        VexQuerySignature querySignature,
        VexExportFormat format,
        VexContentAddress artifact,
        DateTimeOffset createdAt,
        long sizeBytes,
        string? manifestId = null,
        string? gridFsObjectId = null,
        DateTimeOffset? expiresAt = null)
    {
        QuerySignature = querySignature ?? throw new ArgumentNullException(nameof(querySignature));
        Artifact = artifact ?? throw new ArgumentNullException(nameof(artifact));
        Format = format;
        CreatedAt = createdAt;
        SizeBytes = sizeBytes >= 0
            ? sizeBytes
            : throw new ArgumentOutOfRangeException(nameof(sizeBytes), sizeBytes, "Size must be non-negative.");
        ManifestId = Normalize(manifestId);
        GridFsObjectId = Normalize(gridFsObjectId);
        if (expiresAt.HasValue && expiresAt.Value < createdAt)
        {
            throw new ArgumentOutOfRangeException(nameof(expiresAt), expiresAt, "Expiration cannot be before creation.");
        }
        ExpiresAt = expiresAt;
    }

    // Deterministic query signature the cached artifact was produced from.
    public VexQuerySignature QuerySignature { get; }
    // Export format of the cached artifact.
    public VexExportFormat Format { get; }
    // Content address (algorithm + digest) of the artifact.
    public VexContentAddress Artifact { get; }
    // When the cache entry was created.
    public DateTimeOffset CreatedAt { get; }
    // Artifact size in bytes (always >= 0).
    public long SizeBytes { get; }
    // Optional link to the originating export manifest.
    public string? ManifestId { get; }
    // Optional GridFS storage object id.
    public string? GridFsObjectId { get; }
    // Optional expiry; never earlier than CreatedAt.
    public DateTimeOffset? ExpiresAt { get; }

    // Trims identifiers; blank strings collapse to null.
    private static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}

View File

@@ -13,7 +13,9 @@ public sealed record VexConsensus
IEnumerable<VexConsensusSource> sources,
IEnumerable<VexConsensusConflict>? conflicts = null,
string? policyVersion = null,
string? summary = null)
string? summary = null,
string? policyRevisionId = null,
string? policyDigest = null)
{
if (string.IsNullOrWhiteSpace(vulnerabilityId))
{
@@ -28,6 +30,8 @@ public sealed record VexConsensus
Conflicts = NormalizeConflicts(conflicts);
PolicyVersion = string.IsNullOrWhiteSpace(policyVersion) ? null : policyVersion.Trim();
Summary = string.IsNullOrWhiteSpace(summary) ? null : summary.Trim();
PolicyRevisionId = string.IsNullOrWhiteSpace(policyRevisionId) ? null : policyRevisionId.Trim();
PolicyDigest = string.IsNullOrWhiteSpace(policyDigest) ? null : policyDigest.Trim();
}
public string VulnerabilityId { get; }
@@ -46,6 +50,10 @@ public sealed record VexConsensus
public string? Summary { get; }
public string? PolicyRevisionId { get; }
public string? PolicyDigest { get; }
private static ImmutableArray<VexConsensusSource> NormalizeSources(IEnumerable<VexConsensusSource> sources)
{
if (sources is null)

View File

@@ -106,7 +106,9 @@ public sealed class VexConsensusResolver
acceptedSources,
AttachConflictDetails(conflicts, acceptedSources, consensusStatus, conflictKeys),
_policy.Version,
summary);
summary,
request.PolicyRevisionId,
request.PolicyDigest);
return new VexConsensusResolution(consensus, decisions.ToImmutable());
}
@@ -272,7 +274,9 @@ public sealed record VexConsensusRequest(
VexProduct Product,
IReadOnlyList<VexClaim> Claims,
IReadOnlyDictionary<string, VexProvider> Providers,
DateTimeOffset CalculatedAt);
DateTimeOffset CalculatedAt,
string? PolicyRevisionId = null,
string? PolicyDigest = null);
public sealed record VexConsensusResolution(
VexConsensus Consensus,

View File

@@ -38,6 +38,56 @@ public sealed class ExportEngineTests
Assert.Equal(manifest.ExportId, cached.ExportId);
}
    [Fact]
    public async Task ExportAsync_ForceRefreshInvalidatesCacheEntry()
    {
        // Arrange: engine wired with a recording cache index so removals can be observed.
        var store = new InMemoryExportStore();
        var evaluator = new StaticPolicyEvaluator("baseline/v1");
        var dataSource = new InMemoryExportDataSource();
        var exporter = new DummyExporter(VexExportFormat.Json);
        var cacheIndex = new RecordingCacheIndex();
        var engine = new VexExportEngine(store, evaluator, dataSource, new[] { exporter }, NullLogger<VexExportEngine>.Instance, cacheIndex);
        var query = VexQuery.Create(new[] { new VexQueryFilter("vulnId", "CVE-2025-0001") });
        var initialContext = new VexExportRequestContext(query, VexExportFormat.Json, DateTimeOffset.UtcNow);

        // Prime the cache with a first export.
        _ = await engine.ExportAsync(initialContext, CancellationToken.None);

        // Act: re-export the same query with ForceRefresh set.
        var refreshContext = new VexExportRequestContext(query, VexExportFormat.Json, DateTimeOffset.UtcNow.AddMinutes(1), ForceRefresh: true);
        var refreshed = await engine.ExportAsync(refreshContext, CancellationToken.None);

        // Assert: the result was rebuilt and the cache entry for this signature/format was removed.
        Assert.False(refreshed.FromCache);
        var signature = VexQuerySignature.FromQuery(refreshContext.Query);
        Assert.True(cacheIndex.RemoveCalls.TryGetValue((signature.Value, refreshContext.Format), out var removed));
        Assert.True(removed);
    }
    [Fact]
    public async Task ExportAsync_WritesArtifactsToAllStores()
    {
        // Arrange: engine configured with two recording artifact stores.
        var store = new InMemoryExportStore();
        var evaluator = new StaticPolicyEvaluator("baseline/v1");
        var dataSource = new InMemoryExportDataSource();
        var exporter = new DummyExporter(VexExportFormat.Json);
        var recorder1 = new RecordingArtifactStore();
        var recorder2 = new RecordingArtifactStore();
        var engine = new VexExportEngine(
            store,
            evaluator,
            dataSource,
            new[] { exporter },
            NullLogger<VexExportEngine>.Instance,
            cacheIndex: null,
            artifactStores: new[] { recorder1, recorder2 });
        var query = VexQuery.Create(new[] { new VexQueryFilter("vulnId", "CVE-2025-0001") });
        var context = new VexExportRequestContext(query, VexExportFormat.Json, DateTimeOffset.UtcNow);

        // Act
        await engine.ExportAsync(context, CancellationToken.None);

        // Assert: the artifact was saved exactly once to each configured store.
        Assert.Equal(1, recorder1.SaveCount);
        Assert.Equal(1, recorder2.SaveCount);
    }
private sealed class InMemoryExportStore : IVexExportStore
{
private readonly Dictionary<string, VexExportManifest> _store = new(StringComparer.Ordinal);
@@ -60,6 +110,40 @@ public sealed class ExportEngineTests
=> FormattableString.Invariant($"{signature}|{format}");
}
    // Cache index stub that records RemoveAsync calls; lookups always miss and saves are no-ops.
    private sealed class RecordingCacheIndex : IVexCacheIndex
    {
        // Keyed by (query signature, format); value is true once a removal was observed.
        public Dictionary<(string Signature, VexExportFormat Format), bool> RemoveCalls { get; } = new();

        public ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
            => ValueTask.FromResult<VexCacheEntry?>(null);

        public ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;

        public ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
        {
            RemoveCalls[(signature.Value, format)] = true;
            return ValueTask.CompletedTask;
        }
    }
    // Artifact store stub that counts saves; deletes are no-ops and reads always return null.
    private sealed class RecordingArtifactStore : IVexArtifactStore
    {
        public int SaveCount { get; private set; }

        public ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
        {
            SaveCount++;
            return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory", artifact.Content.Length, artifact.Metadata));
        }

        public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;

        public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
            => ValueTask.FromResult<Stream?>(null);
    }
private sealed class StaticPolicyEvaluator : IVexPolicyEvaluator
{
public StaticPolicyEvaluator(string version)

View File

@@ -0,0 +1,33 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Export;
using System.IO.Abstractions.TestingHelpers;
namespace StellaOps.Vexer.Export.Tests;
/// <summary>
/// Tests for <c>FileSystemArtifactStore</c> using an in-memory mock file system.
/// </summary>
public sealed class FileSystemArtifactStoreTests
{
    [Fact]
    public async Task SaveAsync_WritesArtifactToDisk()
    {
        // Arrange: store rooted at a virtual /exports directory.
        var fs = new MockFileSystem();
        var options = Options.Create(new FileSystemArtifactStoreOptions { RootPath = "/exports" });
        var store = new FileSystemArtifactStore(options, NullLogger<FileSystemArtifactStore>.Instance, fs);
        var content = new byte[] { 1, 2, 3 };
        var artifact = new VexExportArtifact(
            new VexContentAddress("sha256", "deadbeef"),
            VexExportFormat.Json,
            content,
            ImmutableDictionary<string, string>.Empty);

        // Act
        var stored = await store.SaveAsync(artifact, CancellationToken.None);

        // Assert: the reported size matches and the bytes landed at the reported location.
        Assert.Equal(artifact.Content.Length, stored.SizeBytes);
        var filePath = fs.Path.Combine(options.Value.RootPath, stored.Location);
        Assert.True(fs.FileExists(filePath));
        Assert.Equal(content, fs.File.ReadAllBytes(filePath));
    }
}

View File

@@ -0,0 +1,59 @@
using System.Collections.Immutable;
using System.IO.Abstractions.TestingHelpers;
using System.Linq;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Export;
namespace StellaOps.Vexer.Export.Tests;
/// <summary>
/// Tests for <c>OfflineBundleArtifactStore</c>: artifact + manifest writing and digest validation.
/// </summary>
public sealed class OfflineBundleArtifactStoreTests
{
    [Fact]
    public async Task SaveAsync_WritesArtifactAndManifest()
    {
        // Arrange: an artifact whose content address matches the SHA-256 of its bytes.
        var fs = new MockFileSystem();
        var options = Options.Create(new OfflineBundleArtifactStoreOptions { RootPath = "/offline" });
        var store = new OfflineBundleArtifactStore(options, NullLogger<OfflineBundleArtifactStore>.Instance, fs);
        var content = new byte[] { 1, 2, 3 };
        var digest = "sha256:" + Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(content)).ToLowerInvariant();
        var artifact = new VexExportArtifact(
            new VexContentAddress("sha256", digest.Split(':')[1]),
            VexExportFormat.Json,
            content,
            ImmutableDictionary<string, string>.Empty);

        // Act
        var stored = await store.SaveAsync(artifact, CancellationToken.None);

        // Assert: artifact file and bundle manifest both exist, and the manifest lists the digest.
        var artifactPath = fs.Path.Combine(options.Value.RootPath, stored.Location);
        Assert.True(fs.FileExists(artifactPath));
        var manifestPath = fs.Path.Combine(options.Value.RootPath, options.Value.ManifestFileName);
        Assert.True(fs.FileExists(manifestPath));

        await using var manifestStream = fs.File.OpenRead(manifestPath);
        using var document = await JsonDocument.ParseAsync(manifestStream);
        var artifacts = document.RootElement.GetProperty("artifacts");
        Assert.True(artifacts.GetArrayLength() >= 1);
        var first = artifacts.EnumerateArray().First();
        Assert.Equal(digest, first.GetProperty("digest").GetString());
    }

    [Fact]
    public async Task SaveAsync_ThrowsOnDigestMismatch()
    {
        // Arrange: declared digest ("deadbeef") does not match the actual content hash.
        var fs = new MockFileSystem();
        var options = Options.Create(new OfflineBundleArtifactStoreOptions { RootPath = "/offline" });
        var store = new OfflineBundleArtifactStore(options, NullLogger<OfflineBundleArtifactStore>.Instance, fs);
        var artifact = new VexExportArtifact(
            new VexContentAddress("sha256", "deadbeef"),
            VexExportFormat.Json,
            new byte[] { 0x01, 0x02 },
            ImmutableDictionary<string, string>.Empty);

        // Act + Assert: the store refuses to persist content whose digest does not verify.
        await Assert.ThrowsAsync<InvalidOperationException>(() => store.SaveAsync(artifact, CancellationToken.None).AsTask());
    }
}

View File

@@ -0,0 +1,95 @@
using System.Collections.Concurrent;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Export;
namespace StellaOps.Vexer.Export.Tests;
public sealed class S3ArtifactStoreTests
{
    [Fact]
    public async Task SaveAsync_UploadsContentWithMetadata()
    {
        // Arrange: S3-backed store with a fake client recording put calls.
        var client = new FakeS3Client();
        var options = Options.Create(new S3ArtifactStoreOptions { BucketName = "exports", Prefix = "vex" });
        var store = new S3ArtifactStore(client, options, NullLogger<S3ArtifactStore>.Instance);
        var content = new byte[] { 1, 2, 3, 4 };
        var artifact = new VexExportArtifact(
            new VexContentAddress("sha256", "deadbeef"),
            VexExportFormat.Json,
            content,
            ImmutableDictionary<string, string>.Empty);

        // Act
        await store.SaveAsync(artifact, CancellationToken.None);

        // Assert: one object was put under the prefixed key with the digest recorded as metadata.
        Assert.True(client.PutCalls.TryGetValue("exports", out var bucketEntries));
        Assert.NotNull(bucketEntries);
        var entry = bucketEntries!.Single();
        Assert.Equal("vex/json/deadbeef.json", entry.Key);
        Assert.Equal(content, entry.Content);
        Assert.Equal("sha256:deadbeef", entry.Metadata["vex-digest"]);
    }
[Fact]
public async Task OpenReadAsync_ReturnsStoredContent()
{
var client = new FakeS3Client();
var options = Options.Create(new S3ArtifactStoreOptions { BucketName = "exports", Prefix = "vex" });
var store = new S3ArtifactStore(client, options, NullLogger<S3ArtifactStore>.Instance);
var address = new VexContentAddress("sha256", "cafebabe");
client.SeedObject("exports", "vex/json/cafebabe.json", new byte[] { 9, 9, 9 });
var stream = await store.OpenReadAsync(address, CancellationToken.None);
Assert.NotNull(stream);
using var ms = new MemoryStream();
await stream!.CopyToAsync(ms);
Assert.Equal(new byte[] { 9, 9, 9 }, ms.ToArray());
}
private sealed class FakeS3Client : IS3ArtifactClient
{
public ConcurrentDictionary<string, List<S3Entry>> PutCalls { get; } = new(StringComparer.Ordinal);
private readonly ConcurrentDictionary<(string Bucket, string Key), byte[]> _storage = new();
public void SeedObject(string bucket, string key, byte[] content)
{
PutCalls.GetOrAdd(bucket, _ => new List<S3Entry>()).Add(new S3Entry(key, content, new Dictionary<string, string>()));
_storage[(bucket, key)] = content;
}
public Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken cancellationToken)
=> Task.FromResult(_storage.ContainsKey((bucketName, key)));
public Task PutObjectAsync(string bucketName, string key, Stream content, IDictionary<string, string> metadata, CancellationToken cancellationToken)
{
using var ms = new MemoryStream();
content.CopyTo(ms);
var bytes = ms.ToArray();
PutCalls.GetOrAdd(bucketName, _ => new List<S3Entry>()).Add(new S3Entry(key, bytes, new Dictionary<string, string>(metadata)));
_storage[(bucketName, key)] = bytes;
return Task.CompletedTask;
}
public Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken cancellationToken)
{
if (_storage.TryGetValue((bucketName, key), out var bytes))
{
return Task.FromResult<Stream?>(new MemoryStream(bytes, writable: false));
}
return Task.FromResult<Stream?>(null);
}
public Task DeleteObjectAsync(string bucketName, string key, CancellationToken cancellationToken)
{
_storage.TryRemove((bucketName, key), out _);
return Task.CompletedTask;
}
public readonly record struct S3Entry(string Key, byte[] Content, IDictionary<string, string> Metadata);
}
}

View File

@@ -6,6 +6,9 @@
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Export\StellaOps.Vexer.Export.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,81 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Export;
using StellaOps.Vexer.Storage.Mongo;
namespace StellaOps.Vexer.Export.Tests;
/// <summary>
/// Tests for <see cref="VexExportCacheService"/> invalidation and pruning pass-throughs.
/// </summary>
public sealed class VexExportCacheServiceTests
{
    [Fact]
    public async Task InvalidateAsync_RemovesEntry()
    {
        var index = new SpyIndex();
        var service = new VexExportCacheService(index, new FakeMaintenance(), NullLogger<VexExportCacheService>.Instance);
        var signature = new VexQuerySignature("format=json|provider=vendor");

        await service.InvalidateAsync(signature, VexExportFormat.Json, CancellationToken.None);

        // The service must delegate straight to the index with the same signature/format.
        Assert.Equal(signature.Value, index.LastSignature?.Value);
        Assert.Equal(VexExportFormat.Json, index.LastFormat);
        Assert.Equal(1, index.RemoveCalls);
    }

    [Fact]
    public async Task PruneExpiredAsync_ReturnsCount()
    {
        var service = new VexExportCacheService(
            new SpyIndex(),
            new FakeMaintenance { ExpiredCount = 3 },
            NullLogger<VexExportCacheService>.Instance);

        var removed = await service.PruneExpiredAsync(DateTimeOffset.UtcNow, CancellationToken.None);

        Assert.Equal(3, removed);
    }

    [Fact]
    public async Task PruneDanglingAsync_ReturnsCount()
    {
        var service = new VexExportCacheService(
            new SpyIndex(),
            new FakeMaintenance { DanglingCount = 2 },
            NullLogger<VexExportCacheService>.Instance);

        var removed = await service.PruneDanglingAsync(CancellationToken.None);

        Assert.Equal(2, removed);
    }

    // Cache index spy that records the last RemoveAsync call.
    private sealed class SpyIndex : IVexCacheIndex
    {
        public VexQuerySignature? LastSignature { get; private set; }

        public VexExportFormat LastFormat { get; private set; }

        public int RemoveCalls { get; private set; }

        public ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
            => ValueTask.FromResult<VexCacheEntry?>(null);

        public ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken)
            => ValueTask.CompletedTask;

        public ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
        {
            LastSignature = signature;
            LastFormat = format;
            RemoveCalls++;
            return ValueTask.CompletedTask;
        }
    }

    // Maintenance stub returning canned prune counts.
    private sealed class FakeMaintenance : IVexCacheMaintenance
    {
        public int ExpiredCount { get; set; }

        public int DanglingCount { get; set; }

        public ValueTask<int> RemoveExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
            => ValueTask.FromResult(ExpiredCount);

        public ValueTask<int> RemoveMissingManifestReferencesAsync(CancellationToken cancellationToken)
            => ValueTask.FromResult(DanglingCount);
    }
}

View File

@@ -37,18 +37,24 @@ public sealed class VexExportEngine : IExportEngine
private readonly IVexExportDataSource _dataSource;
private readonly IReadOnlyDictionary<VexExportFormat, IVexExporter> _exporters;
private readonly ILogger<VexExportEngine> _logger;
private readonly IVexCacheIndex? _cacheIndex;
private readonly IReadOnlyList<IVexArtifactStore> _artifactStores;
public VexExportEngine(
IVexExportStore exportStore,
IVexPolicyEvaluator policyEvaluator,
IVexExportDataSource dataSource,
IEnumerable<IVexExporter> exporters,
ILogger<VexExportEngine> logger)
ILogger<VexExportEngine> logger,
IVexCacheIndex? cacheIndex = null,
IEnumerable<IVexArtifactStore>? artifactStores = null)
{
_exportStore = exportStore ?? throw new ArgumentNullException(nameof(exportStore));
_policyEvaluator = policyEvaluator ?? throw new ArgumentNullException(nameof(policyEvaluator));
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cacheIndex = cacheIndex;
_artifactStores = artifactStores?.ToArray() ?? Array.Empty<IVexArtifactStore>();
if (exporters is null)
{
@@ -69,9 +75,25 @@ public sealed class VexExportEngine : IExportEngine
if (cached is not null)
{
_logger.LogInformation("Reusing cached export for {Signature} ({Format})", signature.Value, context.Format);
return cached with { FromCache = true };
return new VexExportManifest(
cached.ExportId,
cached.QuerySignature,
cached.Format,
cached.CreatedAt,
cached.Artifact,
cached.ClaimCount,
cached.SourceProviders,
fromCache: true,
cached.ConsensusRevision,
cached.Attestation,
cached.SizeBytes);
}
}
else if (_cacheIndex is not null)
{
await _cacheIndex.RemoveAsync(signature, context.Format, cancellationToken).ConfigureAwait(false);
_logger.LogInformation("Force refresh requested; invalidated cache entry for {Signature} ({Format})", signature.Value, context.Format);
}
var dataset = await _dataSource.FetchAsync(context.Query, cancellationToken).ConfigureAwait(false);
var exporter = ResolveExporter(context.Format);
@@ -87,6 +109,31 @@ public sealed class VexExportEngine : IExportEngine
await using var buffer = new MemoryStream();
var result = await exporter.SerializeAsync(exportRequest, buffer, cancellationToken).ConfigureAwait(false);
if (_artifactStores.Count > 0)
{
var writtenBytes = buffer.ToArray();
try
{
var artifact = new VexExportArtifact(
result.Digest,
context.Format,
writtenBytes,
result.Metadata);
foreach (var store in _artifactStores)
{
await store.SaveAsync(artifact, cancellationToken).ConfigureAwait(false);
}
_logger.LogInformation("Stored export artifact {Digest} via {StoreCount} store(s)", result.Digest.ToUri(), _artifactStores.Count);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store export artifact {Digest}", result.Digest.ToUri());
throw;
}
}
var exportId = FormattableString.Invariant($"exports/{context.RequestedAt:yyyyMMddTHHmmssfffZ}/{digest.Digest}");
var manifest = new VexExportManifest(
exportId,
@@ -123,6 +170,7 @@ public static class VexExportServiceCollectionExtensions
/// <summary>
/// Registers the export engine together with the cache invalidation/pruning services it uses.
/// </summary>
public static IServiceCollection AddVexExportEngine(this IServiceCollection services)
{
    services.AddSingleton<IExportEngine, VexExportEngine>();
    services.AddVexExportCacheServices();
    return services;
}
}

View File

@@ -0,0 +1,159 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Abstractions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Export;
/// <summary>
/// Options controlling where <see cref="FileSystemArtifactStore"/> writes export artifacts.
/// </summary>
public sealed class FileSystemArtifactStoreOptions
{
    // Root directory of the store; resolved to an absolute path at construction.
    public string RootPath { get; set; } = ".";

    // When false (default), an already-present artifact file is kept and the write is skipped.
    public bool OverwriteExisting { get; set; } = false;
}
/// <summary>
/// Persists export artifacts on the local file system, laid out as
/// &lt;root&gt;/&lt;format&gt;/&lt;sanitized-digest&gt;&lt;extension&gt;.
/// </summary>
public sealed class FileSystemArtifactStore : IVexArtifactStore
{
    private readonly IFileSystem _fileSystem;
    private readonly FileSystemArtifactStoreOptions _options;
    private readonly ILogger<FileSystemArtifactStore> _logger;

    public FileSystemArtifactStore(
        IOptions<FileSystemArtifactStoreOptions> options,
        ILogger<FileSystemArtifactStore> logger,
        IFileSystem? fileSystem = null)
    {
        ArgumentNullException.ThrowIfNull(options);
        _options = options.Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _fileSystem = fileSystem ?? new FileSystem();

        if (string.IsNullOrWhiteSpace(_options.RootPath))
        {
            throw new ArgumentException("RootPath must be provided for FileSystemArtifactStore.", nameof(options));
        }

        // Normalize to an absolute path once so later Combine/relative-path math is stable.
        var root = _fileSystem.Path.GetFullPath(_options.RootPath);
        _fileSystem.Directory.CreateDirectory(root);
        _options.RootPath = root;
    }

    /// <summary>
    /// Writes the artifact under the store root; an existing file is kept unless
    /// <see cref="FileSystemArtifactStoreOptions.OverwriteExisting"/> is set.
    /// </summary>
    public async ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(artifact);
        var relativePath = BuildArtifactPath(artifact.ContentAddress, artifact.Format);
        var destination = _fileSystem.Path.Combine(_options.RootPath, relativePath);
        var directory = _fileSystem.Path.GetDirectoryName(destination);
        if (!string.IsNullOrEmpty(directory))
        {
            _fileSystem.Directory.CreateDirectory(directory);
        }

        if (_fileSystem.File.Exists(destination) && !_options.OverwriteExisting)
        {
            _logger.LogInformation("Artifact {Digest} already exists at {Path}; skipping write.", artifact.ContentAddress.ToUri(), destination);
        }
        else
        {
            await using var stream = _fileSystem.File.Create(destination);
            await stream.WriteAsync(artifact.Content, cancellationToken).ConfigureAwait(false);
        }

        // Report the location relative to the store root. GetRelativePath is prefix-aware,
        // unlike string.Replace(RootPath, "") which replaces EVERY occurrence of the root
        // text and would corrupt the result if it recurred inside the file name.
        var location = _fileSystem.Path.GetRelativePath(_options.RootPath, destination);
        return new VexStoredArtifact(
            artifact.ContentAddress,
            location,
            artifact.Content.Length,
            artifact.Metadata);
    }

    /// <summary>Deletes the stored file for the content address, if one can be located.</summary>
    public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        var path = MaterializePath(contentAddress);
        if (path is not null && _fileSystem.File.Exists(path))
        {
            _fileSystem.File.Delete(path);
        }

        return ValueTask.CompletedTask;
    }

    /// <summary>Opens the stored artifact for reading, or returns null when not found.</summary>
    public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        var path = MaterializePath(contentAddress);
        if (path is null || !_fileSystem.File.Exists(path))
        {
            return ValueTask.FromResult<Stream?>(null);
        }

        Stream stream = _fileSystem.File.OpenRead(path);
        return ValueTask.FromResult<Stream?>(stream);
    }

    // Builds the <format>/<digest><extension> relative path. Uses the injected IFileSystem
    // (not static Path) so path semantics stay consistent with the rest of the store,
    // including under a mocked file system.
    private string BuildArtifactPath(VexContentAddress address, VexExportFormat format)
    {
        var formatSegment = format.ToString().ToLowerInvariant();
        var safeDigest = address.Digest.Replace(':', '_');
        return _fileSystem.Path.Combine(formatSegment, safeDigest + GetExtension(format));
    }

    // Locates an existing file for the digest by probing every format folder, then the
    // store root itself with common extensions; returns null when nothing matches.
    private string? MaterializePath(VexContentAddress address)
    {
        ArgumentNullException.ThrowIfNull(address);
        var sanitized = address.Digest.Replace(':', '_');
        foreach (VexExportFormat format in Enum.GetValues(typeof(VexExportFormat)))
        {
            var candidate = _fileSystem.Path.Combine(_options.RootPath, format.ToString().ToLowerInvariant(), sanitized + GetExtension(format));
            if (_fileSystem.File.Exists(candidate))
            {
                return candidate;
            }
        }

        // Fallback: direct root search with common extensions.
        foreach (var extension in new[] { ".json", ".jsonl" })
        {
            var candidate = _fileSystem.Path.Combine(_options.RootPath, sanitized + extension);
            if (_fileSystem.File.Exists(candidate))
            {
                return candidate;
            }
        }

        return null;
    }

    // File extension per export format; unknown formats fall back to .bin.
    private static string GetExtension(VexExportFormat format)
        => format switch
        {
            VexExportFormat.Json => ".json",
            VexExportFormat.JsonLines => ".jsonl",
            VexExportFormat.OpenVex => ".json",
            VexExportFormat.Csaf => ".json",
            _ => ".bin",
        };
}
/// <summary>DI helpers for the file-system artifact store.</summary>
public static class FileSystemArtifactStoreServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="FileSystemArtifactStore"/> as an <see cref="IVexArtifactStore"/>,
    /// optionally configuring its options.
    /// </summary>
    public static IServiceCollection AddVexFileSystemArtifactStore(this IServiceCollection services, Action<FileSystemArtifactStoreOptions>? configure = null)
    {
        services.AddSingleton<IVexArtifactStore, FileSystemArtifactStore>();
        if (configure is not null)
        {
            services.Configure(configure);
        }

        return services;
    }
}

View File

@@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Export;
/// <summary>
/// An export artifact to persist: the serialized content plus its content address,
/// export format, and free-form metadata.
/// </summary>
public sealed record VexExportArtifact(
    VexContentAddress ContentAddress,
    VexExportFormat Format,
    ReadOnlyMemory<byte> Content,
    IReadOnlyDictionary<string, string> Metadata);

/// <summary>
/// Result of persisting an artifact: where it was stored and how large it was.
/// </summary>
public sealed record VexStoredArtifact(
    VexContentAddress ContentAddress,
    string Location,
    long SizeBytes,
    IReadOnlyDictionary<string, string> Metadata);

/// <summary>
/// Pluggable persistence backend for export artifacts (file system, S3-compatible,
/// offline bundle, ...).
/// </summary>
public interface IVexArtifactStore
{
    /// <summary>Persists the artifact and returns a descriptor of where it landed.</summary>
    ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken);

    /// <summary>Removes any stored copy addressed by <paramref name="contentAddress"/>.</summary>
    ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken);

    /// <summary>Opens the stored content for reading, or returns null when not found.</summary>
    ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,243 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.IO.Abstractions;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Export;
/// <summary>
/// Options for <see cref="OfflineBundleArtifactStore"/>: the root directory plus the
/// folder/file names that make up the offline layout.
/// </summary>
public sealed class OfflineBundleArtifactStoreOptions
{
    // Root directory of the offline layout; resolved to an absolute path at construction.
    public string RootPath { get; set; } = ".";

    // Sub-folder holding the raw artifact files, grouped by format.
    public string ArtifactsFolder { get; set; } = "artifacts";

    // Sub-folder holding the per-artifact zip bundles.
    public string BundlesFolder { get; set; } = "bundles";

    // File name of the JSON manifest written at the root.
    public string ManifestFileName { get; set; } = "offline-manifest.json";
}
/// <summary>
/// Artifact store that lays out exports for offline (air-gapped) distribution: raw files
/// under &lt;root&gt;/&lt;artifacts&gt;/&lt;format&gt;/, a per-artifact zip bundle under
/// &lt;root&gt;/&lt;bundles&gt;/, and a deterministic JSON manifest at the root listing
/// every stored artifact.
/// </summary>
public sealed class OfflineBundleArtifactStore : IVexArtifactStore
{
    private readonly IFileSystem _fileSystem;
    private readonly OfflineBundleArtifactStoreOptions _options;
    private readonly ILogger<OfflineBundleArtifactStore> _logger;
    private readonly JsonSerializerOptions _serializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
    };

    public OfflineBundleArtifactStore(
        IOptions<OfflineBundleArtifactStoreOptions> options,
        ILogger<OfflineBundleArtifactStore> logger,
        IFileSystem? fileSystem = null)
    {
        ArgumentNullException.ThrowIfNull(options);
        _options = options.Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _fileSystem = fileSystem ?? new FileSystem();

        if (string.IsNullOrWhiteSpace(_options.RootPath))
        {
            throw new ArgumentException("RootPath must be provided for OfflineBundleArtifactStore.", nameof(options));
        }

        // Resolve the root once and pre-create the folder layout so later writes never
        // race directory creation.
        var root = _fileSystem.Path.GetFullPath(_options.RootPath);
        _fileSystem.Directory.CreateDirectory(root);
        _options.RootPath = root;
        _fileSystem.Directory.CreateDirectory(GetArtifactsRoot());
        _fileSystem.Directory.CreateDirectory(GetBundlesRoot());
    }

    /// <summary>
    /// Verifies the artifact's declared sha256 digest, writes the raw file, refreshes the
    /// per-artifact zip bundle, and upserts the offline manifest entry.
    /// </summary>
    /// <exception cref="InvalidOperationException">The content does not hash to the declared digest.</exception>
    public async ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(artifact);
        EnforceDigestMatch(artifact);

        var artifactRelativePath = BuildArtifactRelativePath(artifact);
        var artifactFullPath = _fileSystem.Path.Combine(_options.RootPath, artifactRelativePath);
        var artifactDirectory = _fileSystem.Path.GetDirectoryName(artifactFullPath);
        if (!string.IsNullOrEmpty(artifactDirectory))
        {
            _fileSystem.Directory.CreateDirectory(artifactDirectory);
        }

        await using (var stream = _fileSystem.File.Create(artifactFullPath))
        {
            await stream.WriteAsync(artifact.Content, cancellationToken).ConfigureAwait(false);
        }

        WriteOfflineBundle(artifactRelativePath, artifact, cancellationToken);
        await UpdateManifestAsync(artifactRelativePath, artifact, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Stored offline artifact {Digest} at {Path}", artifact.ContentAddress.ToUri(), artifactRelativePath);

        return new VexStoredArtifact(
            artifact.ContentAddress,
            artifactRelativePath,
            artifact.Content.Length,
            artifact.Metadata);
    }

    /// <summary>
    /// Removes the raw artifact file for every known format plus the zip bundle.
    /// The manifest is intentionally left untouched here.
    /// </summary>
    public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(contentAddress);
        var sanitized = contentAddress.Digest.Replace(':', '_');
        var artifactsRoot = GetArtifactsRoot();
        foreach (VexExportFormat format in Enum.GetValues(typeof(VexExportFormat)))
        {
            var path = _fileSystem.Path.Combine(artifactsRoot, format.ToString().ToLowerInvariant(), sanitized + GetExtension(format));
            if (_fileSystem.File.Exists(path))
            {
                _fileSystem.File.Delete(path);
            }
        }

        // The bundle name does not depend on the format, so delete it once instead of
        // re-checking the same path on every loop iteration.
        var bundlePath = _fileSystem.Path.Combine(GetBundlesRoot(), sanitized + ".zip");
        if (_fileSystem.File.Exists(bundlePath))
        {
            _fileSystem.File.Delete(bundlePath);
        }

        return ValueTask.CompletedTask;
    }

    /// <summary>Opens the stored artifact for the digest, probing every known format folder.</summary>
    public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(contentAddress);
        var artifactsRoot = GetArtifactsRoot();
        var sanitized = contentAddress.Digest.Replace(':', '_');
        foreach (VexExportFormat format in Enum.GetValues(typeof(VexExportFormat)))
        {
            var candidate = _fileSystem.Path.Combine(artifactsRoot, format.ToString().ToLowerInvariant(), sanitized + GetExtension(format));
            if (_fileSystem.File.Exists(candidate))
            {
                return ValueTask.FromResult<Stream?>(_fileSystem.File.OpenRead(candidate));
            }
        }

        return ValueTask.FromResult<Stream?>(null);
    }

    // Recomputes the sha256 of the payload and compares it to the declared content
    // address; content addressed with any other algorithm is accepted as-is.
    private static void EnforceDigestMatch(VexExportArtifact artifact)
    {
        if (!artifact.ContentAddress.Algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase))
        {
            return;
        }

        // SHA256.HashData hashes the span directly, avoiding the defensive ToArray copy
        // and the SHA256 instance allocation of SHA256.Create().ComputeHash(...).
        var computed = "sha256:" + Convert.ToHexString(SHA256.HashData(artifact.Content.Span)).ToLowerInvariant();
        if (!string.Equals(computed, artifact.ContentAddress.ToUri(), StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Artifact content digest mismatch. Expected {artifact.ContentAddress.ToUri()} but computed {computed}.");
        }
    }

    // <artifactsFolder>/<format>/<sanitized-digest><extension>, relative to the root.
    private string BuildArtifactRelativePath(VexExportArtifact artifact)
    {
        var sanitized = artifact.ContentAddress.Digest.Replace(':', '_');
        var folder = _fileSystem.Path.Combine(_options.ArtifactsFolder, artifact.Format.ToString().ToLowerInvariant());
        return _fileSystem.Path.Combine(folder, sanitized + GetExtension(artifact.Format));
    }

    // Writes <bundles>/<digest>.zip containing the artifact plus a metadata.json sidecar.
    private void WriteOfflineBundle(string artifactRelativePath, VexExportArtifact artifact, CancellationToken cancellationToken)
    {
        // Honor cancellation before creating/truncating the bundle file (the token was
        // previously accepted but ignored).
        cancellationToken.ThrowIfCancellationRequested();

        var zipPath = _fileSystem.Path.Combine(GetBundlesRoot(), artifact.ContentAddress.Digest.Replace(':', '_') + ".zip");
        using var zipStream = _fileSystem.File.Create(zipPath);
        using var archive = new ZipArchive(zipStream, ZipArchiveMode.Create);

        // Zip entry names must use forward slashes regardless of host OS (ZIP APPNOTE
        // 4.4.17). Path.Combine yields '\' on Windows, which would produce an archive
        // other tools unpack incorrectly, so normalize the separators here.
        var entryName = artifactRelativePath.Replace('\\', '/');
        var entry = archive.CreateEntry(entryName, CompressionLevel.Optimal);
        using (var entryStream = entry.Open())
        {
            entryStream.Write(artifact.Content.Span);
        }

        // Embed a metadata sidecar describing the artifact.
        var metadataEntry = archive.CreateEntry("metadata.json", CompressionLevel.Optimal);
        using var metadataStream = new StreamWriter(metadataEntry.Open());
        var metadata = new Dictionary<string, object?>
        {
            ["digest"] = artifact.ContentAddress.ToUri(),
            ["format"] = artifact.Format.ToString().ToLowerInvariant(),
            ["sizeBytes"] = artifact.Content.Length,
            ["metadata"] = artifact.Metadata,
        };
        metadataStream.Write(JsonSerializer.Serialize(metadata, _serializerOptions));
    }

    // Rewrites the offline manifest with the new entry upserted, keeping the artifact
    // list sorted by digest so the output is deterministic across runs.
    private async Task UpdateManifestAsync(string artifactRelativePath, VexExportArtifact artifact, CancellationToken cancellationToken)
    {
        var manifestPath = _fileSystem.Path.Combine(_options.RootPath, _options.ManifestFileName);
        var records = new List<ManifestEntry>();
        if (_fileSystem.File.Exists(manifestPath))
        {
            await using var existingStream = _fileSystem.File.OpenRead(manifestPath);
            var existing = await JsonSerializer.DeserializeAsync<ManifestDocument>(existingStream, _serializerOptions, cancellationToken).ConfigureAwait(false);
            if (existing is not null)
            {
                records.AddRange(existing.Artifacts);
            }
        }

        // Upsert: drop any previous entry for this digest, then append the fresh one.
        records.RemoveAll(x => string.Equals(x.Digest, artifact.ContentAddress.ToUri(), StringComparison.OrdinalIgnoreCase));
        records.Add(new ManifestEntry(
            artifact.ContentAddress.ToUri(),
            artifact.Format.ToString().ToLowerInvariant(),
            artifactRelativePath.Replace("\\", "/"),
            artifact.Content.Length,
            artifact.Metadata));
        records.Sort(static (a, b) => string.CompareOrdinal(a.Digest, b.Digest));

        var doc = new ManifestDocument(records.ToImmutableArray());
        await using var stream = _fileSystem.File.Create(manifestPath);
        await JsonSerializer.SerializeAsync(stream, doc, _serializerOptions, cancellationToken).ConfigureAwait(false);
    }

    private string GetArtifactsRoot() => _fileSystem.Path.Combine(_options.RootPath, _options.ArtifactsFolder);

    private string GetBundlesRoot() => _fileSystem.Path.Combine(_options.RootPath, _options.BundlesFolder);

    // File extension per export format; unknown formats fall back to .bin.
    private static string GetExtension(VexExportFormat format)
        => format switch
        {
            VexExportFormat.Json => ".json",
            VexExportFormat.JsonLines => ".jsonl",
            VexExportFormat.OpenVex => ".json",
            VexExportFormat.Csaf => ".json",
            _ => ".bin",
        };

    // On-disk shape of the offline manifest.
    private sealed record ManifestDocument(ImmutableArray<ManifestEntry> Artifacts);

    private sealed record ManifestEntry(string Digest, string Format, string Path, long SizeBytes, IReadOnlyDictionary<string, string> Metadata);
}
/// <summary>DI helpers for the offline bundle artifact store.</summary>
public static class OfflineBundleArtifactStoreServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="OfflineBundleArtifactStore"/> as an <see cref="IVexArtifactStore"/>,
    /// optionally configuring its options.
    /// </summary>
    public static IServiceCollection AddVexOfflineBundleArtifactStore(this IServiceCollection services, Action<OfflineBundleArtifactStoreOptions>? configure = null)
    {
        services.AddSingleton<IVexArtifactStore, OfflineBundleArtifactStore>();
        if (configure is not null)
        {
            services.Configure(configure);
        }

        return services;
    }
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Vexer.Export.Tests")]

View File

@@ -0,0 +1,181 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Export;
/// <summary>Configuration for <see cref="S3ArtifactStore"/>.</summary>
public sealed class S3ArtifactStoreOptions
{
    // Target bucket; must be non-empty (validated by the store constructor).
    public string BucketName { get; set; } = string.Empty;

    // Optional key prefix prepended to every object key.
    public string? Prefix { get; set; }

    // When false, an existing object with the same key short-circuits the upload.
    public bool OverwriteExisting { get; set; } = true;
}
/// <summary>
/// Minimal S3-compatible object operations used by <see cref="S3ArtifactStore"/>,
/// abstracted behind an interface so tests can substitute an in-memory fake.
/// </summary>
public interface IS3ArtifactClient
{
    /// <summary>Returns true when an object exists at <paramref name="key"/>.</summary>
    Task<bool> ObjectExistsAsync(string bucketName, string key, CancellationToken cancellationToken);

    /// <summary>Uploads the stream content with the given object metadata.</summary>
    Task PutObjectAsync(string bucketName, string key, Stream content, IDictionary<string, string> metadata, CancellationToken cancellationToken);

    /// <summary>Returns the object's content stream, or null when the key does not exist.</summary>
    Task<Stream?> GetObjectAsync(string bucketName, string key, CancellationToken cancellationToken);

    /// <summary>Deletes the object at <paramref name="key"/> (no-op semantics for missing keys are client-defined).</summary>
    Task DeleteObjectAsync(string bucketName, string key, CancellationToken cancellationToken);
}
/// <summary>
/// Persists export artifacts in an S3-compatible bucket using keys shaped
/// &lt;prefix&gt;/&lt;format&gt;/&lt;sanitized-digest&gt;&lt;extension&gt;.
/// </summary>
public sealed class S3ArtifactStore : IVexArtifactStore
{
    private readonly IS3ArtifactClient _client;
    private readonly S3ArtifactStoreOptions _options;
    private readonly ILogger<S3ArtifactStore> _logger;

    public S3ArtifactStore(
        IS3ArtifactClient client,
        IOptions<S3ArtifactStoreOptions> options,
        ILogger<S3ArtifactStore> logger)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        ArgumentNullException.ThrowIfNull(options);
        _options = options.Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        if (string.IsNullOrWhiteSpace(_options.BucketName))
        {
            throw new ArgumentException("BucketName must be provided for S3ArtifactStore.", nameof(options));
        }
    }

    /// <summary>
    /// Uploads the artifact with digest/format metadata. When
    /// <see cref="S3ArtifactStoreOptions.OverwriteExisting"/> is false, an existing object
    /// with the same key short-circuits the upload.
    /// </summary>
    public async ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(artifact);
        var key = BuildObjectKey(artifact.ContentAddress, artifact.Format);
        if (!_options.OverwriteExisting)
        {
            var exists = await _client.ObjectExistsAsync(_options.BucketName, key, cancellationToken).ConfigureAwait(false);
            if (exists)
            {
                _logger.LogInformation("S3 object {Bucket}/{Key} already exists; skipping upload.", _options.BucketName, key);
                return new VexStoredArtifact(artifact.ContentAddress, key, artifact.Content.Length, artifact.Metadata);
            }
        }

        using var contentStream = new MemoryStream(artifact.Content.ToArray());
        await _client.PutObjectAsync(
            _options.BucketName,
            key,
            contentStream,
            BuildObjectMetadata(artifact),
            cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Uploaded export artifact {Digest} to {Bucket}/{Key}", artifact.ContentAddress.ToUri(), _options.BucketName, key);
        return new VexStoredArtifact(
            artifact.ContentAddress,
            key,
            artifact.Content.Length,
            artifact.Metadata);
    }

    /// <summary>
    /// Deletes every candidate key for the digest; the address alone does not identify
    /// the format, so all format-specific (and legacy un-formatted) keys are removed.
    /// </summary>
    public async ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(contentAddress);
        foreach (var key in BuildCandidateKeys(contentAddress))
        {
            await _client.DeleteObjectAsync(_options.BucketName, key, cancellationToken).ConfigureAwait(false);
        }
        _logger.LogInformation("Deleted export artifact {Digest} from {Bucket}", contentAddress.ToUri(), _options.BucketName);
    }

    /// <summary>Returns the first candidate object that exists, or null when none do.</summary>
    public async ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(contentAddress);
        foreach (var key in BuildCandidateKeys(contentAddress))
        {
            var stream = await _client.GetObjectAsync(_options.BucketName, key, cancellationToken).ConfigureAwait(false);
            if (stream is not null)
            {
                return stream;
            }
        }
        return null;
    }

    // <prefix>/<format>/<sanitized-digest><extension>; ':' in the digest is replaced
    // with '_' to keep keys portable.
    private string BuildObjectKey(VexContentAddress address, VexExportFormat format)
    {
        var sanitizedDigest = address.Digest.Replace(':', '_');
        var prefix = string.IsNullOrWhiteSpace(_options.Prefix) ? string.Empty : _options.Prefix.TrimEnd('/') + "/";
        var formatSegment = format.ToString().ToLowerInvariant();
        return $"{prefix}{formatSegment}/{sanitizedDigest}{GetExtension(format)}";
    }

    // Enumerates every key the artifact might live under: one per known format, plus
    // extension-less fallbacks with and without the configured prefix.
    private IEnumerable<string> BuildCandidateKeys(VexContentAddress address)
    {
        foreach (VexExportFormat format in Enum.GetValues(typeof(VexExportFormat)))
        {
            yield return BuildObjectKey(address, format);
        }
        if (!string.IsNullOrWhiteSpace(_options.Prefix))
        {
            yield return $"{_options.Prefix.TrimEnd('/')}/{address.Digest.Replace(':', '_')}";
        }
        yield return address.Digest.Replace(':', '_');
    }

    // Object metadata: digest URI, lowercase format, a content-type hint, and the
    // artifact's own metadata prefixed with "meta-".
    private static IDictionary<string, string> BuildObjectMetadata(VexExportArtifact artifact)
    {
        var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["vex-format"] = artifact.Format.ToString().ToLowerInvariant(),
            ["vex-digest"] = artifact.ContentAddress.ToUri(),
            ["content-type"] = artifact.Format switch
            {
                VexExportFormat.Json => "application/json",
                VexExportFormat.JsonLines => "application/json",
                VexExportFormat.OpenVex => "application/vnd.openvex+json",
                VexExportFormat.Csaf => "application/json",
                _ => "application/octet-stream",
            },
        };
        foreach (var kvp in artifact.Metadata)
        {
            metadata[$"meta-{kvp.Key}"] = kvp.Value;
        }
        return metadata;
    }

    // File extension per export format; unknown formats fall back to .bin.
    private static string GetExtension(VexExportFormat format)
        => format switch
        {
            VexExportFormat.Json => ".json",
            VexExportFormat.JsonLines => ".jsonl",
            VexExportFormat.OpenVex => ".json",
            VexExportFormat.Csaf => ".json",
            _ => ".bin",
        };
}
/// <summary>DI helpers for the S3-compatible artifact store.</summary>
public static class S3ArtifactStoreServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="S3ArtifactStore"/> as an <see cref="IVexArtifactStore"/>;
    /// configuration is mandatory because a bucket name must be supplied.
    /// </summary>
    public static IServiceCollection AddVexS3ArtifactStore(this IServiceCollection services, Action<S3ArtifactStoreOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(configure);
        services.Configure(configure);
        return services.AddSingleton<IVexArtifactStore, S3ArtifactStore>();
    }
}

View File

@@ -9,6 +9,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="System.IO.Abstractions" Version="20.0.28" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />

View File

@@ -3,6 +3,7 @@ If you are working on this file you need to read docs/ARCHITECTURE_VEXER.md and
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|VEXER-EXPORT-01-001 Export engine orchestration|Team Vexer Export|VEXER-CORE-01-003|DONE (2025-10-15) Export engine scaffolding with cache lookup, data source hooks, and deterministic manifest emission.|
|VEXER-EXPORT-01-002 Cache index & eviction hooks|Team Vexer Export|VEXER-EXPORT-01-001, VEXER-STORAGE-01-003|TODO Wire cache lookup/write path against `vex.cache` collection and add GC utilities for Worker to prune stale entries deterministically.|
|VEXER-EXPORT-01-003 Artifact store adapters|Team Vexer Export|VEXER-EXPORT-01-001|TODO Provide pluggable storage adapters (filesystem, S3/MinIO) with offline bundle packaging and hash verification.|
|VEXER-EXPORT-01-002 Cache index & eviction hooks|Team Vexer Export|VEXER-EXPORT-01-001, VEXER-STORAGE-01-003|**DONE (2025-10-16)** Export engine now invalidates cache entries on force refresh, cache services expose prune/invalidate APIs, and storage maintenance trims expired/dangling records with Mongo2Go coverage.|
|VEXER-EXPORT-01-003 Artifact store adapters|Team Vexer Export|VEXER-EXPORT-01-001|**DONE (2025-10-16)** Implemented multi-store pipeline with filesystem, S3-compatible, and offline bundle adapters (hash verification + manifest/zip output) plus unit coverage and DI hooks.|
|VEXER-EXPORT-01-004 Attestation handoff integration|Team Vexer Export|VEXER-EXPORT-01-001, VEXER-ATTEST-01-001|TODO Connect export engine to attestation client, persist Rekor metadata, and reuse cached attestations.|
|VEXER-EXPORT-01-005 Score & resolve envelope surfaces|Team Vexer Export|VEXER-EXPORT-01-004, VEXER-CORE-02-001|TODO Emit consensus+score envelopes in export manifests, include policy/scoring digests, and update offline bundle/ORAS layouts to carry signed VEX responses.|

View File

@@ -0,0 +1,54 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Storage.Mongo;
namespace StellaOps.Vexer.Export;
/// <summary>
/// Operational surface for the export cache: explicit invalidation plus pruning of
/// expired and dangling entries (delegated to the storage-layer cache index/maintenance).
/// </summary>
public interface IVexExportCacheService
{
    /// <summary>Removes the cached export for the signature/format pair, if present.</summary>
    ValueTask InvalidateAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken);

    /// <summary>Removes entries expired as of <paramref name="asOf"/>; returns the removed count.</summary>
    ValueTask<int> PruneExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken);

    /// <summary>Removes entries whose manifest reference is missing; returns the removed count.</summary>
    ValueTask<int> PruneDanglingAsync(CancellationToken cancellationToken);
}
/// <summary>
/// Default <see cref="IVexExportCacheService"/>: thin pass-through to the cache index
/// (invalidation) and cache maintenance (pruning), with informational logging.
/// </summary>
internal sealed class VexExportCacheService : IVexExportCacheService
{
    private readonly IVexCacheIndex _index;
    private readonly IVexCacheMaintenance _indexMaintenance;
    private readonly ILogger<VexExportCacheService> _log;

    public VexExportCacheService(
        IVexCacheIndex cacheIndex,
        IVexCacheMaintenance maintenance,
        ILogger<VexExportCacheService> logger)
    {
        _index = cacheIndex ?? throw new ArgumentNullException(nameof(cacheIndex));
        _indexMaintenance = maintenance ?? throw new ArgumentNullException(nameof(maintenance));
        _log = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Drops the cache entry for the given signature/format pair.</summary>
    public async ValueTask InvalidateAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(signature);
        await _index.RemoveAsync(signature, format, cancellationToken).ConfigureAwait(false);
        _log.LogInformation("Invalidated export cache entry {Signature} ({Format})", signature.Value, format);
    }

    /// <summary>Prunes entries expired as of <paramref name="asOf"/> via storage maintenance.</summary>
    public ValueTask<int> PruneExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
        => _indexMaintenance.RemoveExpiredAsync(asOf, cancellationToken);

    /// <summary>Prunes entries whose manifest reference no longer exists.</summary>
    public ValueTask<int> PruneDanglingAsync(CancellationToken cancellationToken)
        => _indexMaintenance.RemoveMissingManifestReferencesAsync(cancellationToken);
}
/// <summary>DI registration helpers for the export cache service.</summary>
public static class VexExportCacheServiceCollectionExtensions
{
    /// <summary>Registers <see cref="IVexExportCacheService"/> as a singleton and returns the collection for chaining.</summary>
    public static IServiceCollection AddVexExportCacheServices(this IServiceCollection services)
        => services.AddSingleton<IVexExportCacheService, VexExportCacheService>();
}

View File

@@ -1,3 +1,4 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using Microsoft.Extensions.DependencyInjection;
@@ -27,13 +28,17 @@ public sealed record VexPolicySnapshot(
string Version,
VexConsensusPolicyOptions ConsensusOptions,
IVexConsensusPolicy ConsensusPolicy,
ImmutableArray<VexPolicyIssue> Issues)
ImmutableArray<VexPolicyIssue> Issues,
string RevisionId,
string Digest)
{
public static readonly VexPolicySnapshot Default = new(
VexConsensusPolicyOptions.BaselineVersion,
new VexConsensusPolicyOptions(),
new BaselineVexConsensusPolicy(),
ImmutableArray<VexPolicyIssue>.Empty);
ImmutableArray<VexPolicyIssue>.Empty,
"rev-0",
string.Empty);
}
public sealed record VexPolicyIssue(
@@ -51,6 +56,10 @@ public sealed class VexPolicyProvider : IVexPolicyProvider
{
private readonly IOptionsMonitor<VexPolicyOptions> _options;
private readonly ILogger<VexPolicyProvider> _logger;
private readonly object _sync = new();
private long _revisionCounter;
private string? _currentRevisionId;
private string? _currentDigest;
public VexPolicyProvider(
IOptionsMonitor<VexPolicyOptions> options,
@@ -68,36 +77,48 @@ public sealed class VexPolicyProvider : IVexPolicyProvider
private VexPolicySnapshot BuildSnapshot(VexPolicyOptions options)
{
var issues = ImmutableArray.CreateBuilder<VexPolicyIssue>();
var normalization = VexPolicyProcessing.Normalize(options);
var digest = VexPolicyDigest.Compute(normalization.ConsensusOptions);
string revisionId;
bool isNewRevision;
if (!TryNormalizeWeights(options.Weights, out var weightOptions, issues))
lock (_sync)
{
issues.Add(new VexPolicyIssue(
"weights.invalid",
"Weight configuration is invalid; falling back to defaults.",
VexPolicyIssueSeverity.Warning));
weightOptions = new VexConsensusPolicyOptions();
if (!string.Equals(_currentDigest, digest, StringComparison.Ordinal))
{
_revisionCounter++;
revisionId = $"rev-{_revisionCounter}";
_currentDigest = digest;
_currentRevisionId = revisionId;
isNewRevision = true;
}
else
{
revisionId = _currentRevisionId ?? "rev-0";
isNewRevision = false;
}
}
var overrides = NormalizeOverrides(options.ProviderOverrides, issues);
var consensusOptions = new VexConsensusPolicyOptions(
options.Version ?? VexConsensusPolicyOptions.BaselineVersion,
weightOptions.VendorWeight,
weightOptions.DistroWeight,
weightOptions.PlatformWeight,
weightOptions.HubWeight,
weightOptions.AttestationWeight,
overrides);
var policy = new BaselineVexConsensusPolicy(consensusOptions);
var policy = new BaselineVexConsensusPolicy(normalization.ConsensusOptions);
var snapshot = new VexPolicySnapshot(
consensusOptions.Version,
consensusOptions,
normalization.ConsensusOptions.Version,
normalization.ConsensusOptions,
policy,
issues.ToImmutable());
normalization.Issues,
revisionId,
digest);
if (snapshot.Issues.Length > 0)
if (isNewRevision)
{
_logger.LogInformation(
"Policy snapshot updated: revision {RevisionId}, version {Version}, digest {Digest}, issues {IssueCount}",
snapshot.RevisionId,
snapshot.Version,
snapshot.Digest,
snapshot.Issues.Length);
VexPolicyTelemetry.RecordReload(snapshot.RevisionId, snapshot.Version, snapshot.Issues.Length);
}
else if (snapshot.Issues.Length > 0)
{
foreach (var issue in snapshot.Issues)
{
@@ -107,93 +128,6 @@ public sealed class VexPolicyProvider : IVexPolicyProvider
return snapshot;
}
private static bool TryNormalizeWeights(
VexPolicyWeightOptions options,
out VexConsensusPolicyOptions normalized,
ImmutableArray<VexPolicyIssue>.Builder issues)
{
var hasAny = options is not null &&
(options.Vendor.HasValue || options.Distro.HasValue ||
options.Platform.HasValue || options.Hub.HasValue || options.Attestation.HasValue);
if (!hasAny)
{
normalized = new VexConsensusPolicyOptions();
return true;
}
var vendor = Clamp(options.Vendor, nameof(options.Vendor), issues);
var distro = Clamp(options.Distro, nameof(options.Distro), issues);
var platform = Clamp(options.Platform, nameof(options.Platform), issues);
var hub = Clamp(options.Hub, nameof(options.Hub), issues);
var attestation = Clamp(options.Attestation, nameof(options.Attestation), issues);
normalized = new VexConsensusPolicyOptions(
VexConsensusPolicyOptions.BaselineVersion,
vendor ?? 1.0,
distro ?? 0.9,
platform ?? 0.7,
hub ?? 0.5,
attestation ?? 0.6);
return true;
}
private static double? Clamp(double? value, string fieldName, ImmutableArray<VexPolicyIssue>.Builder issues)
{
if (value is null)
{
return null;
}
if (double.IsNaN(value.Value) || double.IsInfinity(value.Value))
{
issues.Add(new VexPolicyIssue(
$"weights.{fieldName}.invalid",
$"{fieldName} must be a finite number.",
VexPolicyIssueSeverity.Warning));
return null;
}
if (value.Value < 0 || value.Value > 1)
{
issues.Add(new VexPolicyIssue(
$"weights.{fieldName}.range",
$"{fieldName} must be between 0 and 1; value {value.Value.ToString(CultureInfo.InvariantCulture)} was clamped.",
VexPolicyIssueSeverity.Warning));
return Math.Clamp(value.Value, 0, 1);
}
return value.Value;
}
private static ImmutableDictionary<string, double> NormalizeOverrides(
IDictionary<string, double>? overrides,
ImmutableArray<VexPolicyIssue>.Builder issues)
{
if (overrides is null || overrides.Count == 0)
{
return ImmutableDictionary<string, double>.Empty;
}
var builder = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.Ordinal);
foreach (var kvp in overrides)
{
if (string.IsNullOrWhiteSpace(kvp.Key))
{
issues.Add(new VexPolicyIssue(
"overrides.key.missing",
"Encountered provider override with empty key; ignoring entry.",
VexPolicyIssueSeverity.Warning));
continue;
}
var weight = Clamp(kvp.Value, $"overrides.{kvp.Key}", issues) ?? kvp.Value;
builder[kvp.Key.Trim()] = weight;
}
return builder.ToImmutable();
}
}
public sealed class VexPolicyEvaluator : IVexPolicyEvaluator

View File

@@ -9,6 +9,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="YamlDotNet" Version="13.7.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />

View File

@@ -4,6 +4,8 @@ If you are working on this file you need to read docs/ARCHITECTURE_VEXER.md and
|---|---|---|---|
|VEXER-POLICY-01-001 Policy schema & binding|Team Vexer Policy|VEXER-CORE-01-001|DONE (2025-10-15) Established `VexPolicyOptions`, options binding, and snapshot provider covering baseline weights/overrides.|
|VEXER-POLICY-01-002 Policy evaluator service|Team Vexer Policy|VEXER-POLICY-01-001|DONE (2025-10-15) `VexPolicyEvaluator` exposes immutable snapshots to consensus and normalizes rejection reasons.|
|VEXER-POLICY-01-003 Operator diagnostics & docs|Team Vexer Policy|VEXER-POLICY-01-001|TODO Surface structured diagnostics (CLI/WebService) and author policy upgrade guidance in docs/ARCHITECTURE_VEXER.md appendix.|
|VEXER-POLICY-01-004 Policy schema validation & YAML binding|Team Vexer Policy|VEXER-POLICY-01-001|TODO Add strongly-typed YAML/JSON binding, schema validation, and deterministic diagnostics for operator-supplied policy bundles.|
|VEXER-POLICY-01-005 Policy change tracking & telemetry|Team Vexer Policy|VEXER-POLICY-01-002|TODO Emit revision history, expose snapshot digests via CLI/WebService, and add structured logging/metrics for policy reloads.|
|VEXER-POLICY-01-003 Operator diagnostics & docs|Team Vexer Policy|VEXER-POLICY-01-001|**DONE (2025-10-16)** Surface structured diagnostics (CLI/WebService) and author policy upgrade guidance in docs/ARCHITECTURE_VEXER.md appendix.<br>2025-10-16: Added `IVexPolicyDiagnostics`/`VexPolicyDiagnosticsReport`, sorted issue ordering, recommendations, and appendix guidance. Tests: `dotnet test src/StellaOps.Vexer.Core.Tests/StellaOps.Vexer.Core.Tests.csproj`.|
|VEXER-POLICY-01-004 Policy schema validation & YAML binding|Team Vexer Policy|VEXER-POLICY-01-001|**DONE (2025-10-16)** Added strongly-typed YAML/JSON binding, schema validation, and deterministic diagnostics for operator-supplied policy bundles.|
|VEXER-POLICY-01-005 Policy change tracking & telemetry|Team Vexer Policy|VEXER-POLICY-01-002|**DONE (2025-10-16)** Emit revision history, expose snapshot digests via CLI/WebService, and add structured logging/metrics for policy reloads.<br>2025-10-16: `VexPolicySnapshot` now carries revision/digest, provider logs reloads, `vex.policy.reloads` metric emitted, binder/diagnostics expose digest metadata. Tests: `dotnet test src/StellaOps.Vexer.Core.Tests/StellaOps.Vexer.Core.Tests.csproj`.|
|VEXER-POLICY-02-001 Scoring coefficients & weight ceilings|Team Vexer Policy|VEXER-POLICY-01-004|TODO Extend `VexPolicyOptions` with α/β boosters and optional >1.0 weight ceilings, validate ranges, and document operator guidance in `docs/ARCHITECTURE_VEXER.md`/`docs/VEXER_SCORRING.md`.|
|VEXER-POLICY-02-002 Diagnostics for scoring signals|Team Vexer Policy|VEXER-POLICY-02-001|BACKLOG Update diagnostics reports to surface missing severity/KEV/EPSS mappings, coefficient overrides, and provide actionable recommendations for policy tuning.|

View File

@@ -0,0 +1,94 @@
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using StellaOps.Vexer.Core;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
namespace StellaOps.Vexer.Policy;
/// <summary>Supported serialization formats for operator-supplied policy documents.</summary>
public enum VexPolicyDocumentFormat
{
    Json,
    Yaml,
}
/// <summary>Outcome of binding and normalizing an operator-supplied policy document.</summary>
/// <param name="Success">True when the document parsed and produced no error-severity issues.</param>
/// <param name="Options">The raw deserialized options, or null on parse failure.</param>
/// <param name="NormalizedOptions">The normalized consensus options, or null on parse failure.</param>
/// <param name="Issues">Diagnostics gathered during parsing and normalization.</param>
public sealed record VexPolicyBindingResult(
    bool Success,
    VexPolicyOptions? Options,
    VexConsensusPolicyOptions? NormalizedOptions,
    ImmutableArray<VexPolicyIssue> Issues);
/// <summary>
/// Binds operator-supplied policy documents (JSON or YAML) into
/// <see cref="VexPolicyBindingResult"/> instances, routing parsed options through the
/// shared normalization pipeline. Parse failures are reported as error issues, never thrown.
/// </summary>
public static class VexPolicyBinder
{
    // Cached serializer configuration (CA1869): building JsonSerializerOptions on every
    // call defeats its internal metadata caching and allocates needlessly.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    /// <summary>
    /// Parses and normalizes a policy document supplied as text. Empty/whitespace input
    /// and malformed JSON/YAML produce a failed result with a single error issue.
    /// </summary>
    public static VexPolicyBindingResult Bind(string content, VexPolicyDocumentFormat format)
    {
        if (string.IsNullOrWhiteSpace(content))
        {
            return Failure("policy.empty", "Policy document is empty.");
        }

        try
        {
            var options = Parse(content, format);
            return Normalize(options);
        }
        catch (JsonException ex)
        {
            return Failure("policy.parse.json", $"Failed to parse JSON policy document: {ex.Message}");
        }
        catch (YamlDotNet.Core.YamlException ex)
        {
            return Failure("policy.parse.yaml", $"Failed to parse YAML policy document: {ex.Message}");
        }
    }

    /// <summary>
    /// Reads the stream to the end (UTF-8 unless <paramref name="encoding"/> is given,
    /// BOM detection enabled) and binds the content. The stream is left open for the caller.
    /// </summary>
    public static VexPolicyBindingResult Bind(Stream stream, VexPolicyDocumentFormat format, Encoding? encoding = null)
    {
        ArgumentNullException.ThrowIfNull(stream);

        encoding ??= Encoding.UTF8;
        using var reader = new StreamReader(stream, encoding, detectEncodingFromByteOrderMarks: true, leaveOpen: true);
        var content = reader.ReadToEnd();
        return Bind(content, format);
    }

    // Runs shared normalization; success means no error-severity issues were produced.
    private static VexPolicyBindingResult Normalize(VexPolicyOptions options)
    {
        var normalization = VexPolicyProcessing.Normalize(options);
        var hasErrors = normalization.Issues.Any(static issue => issue.Severity == VexPolicyIssueSeverity.Error);
        return new VexPolicyBindingResult(!hasErrors, options, normalization.ConsensusOptions, normalization.Issues);
    }

    // Wraps a single error issue into a failed binding result.
    private static VexPolicyBindingResult Failure(string code, string message)
    {
        var issue = new VexPolicyIssue(code, message, VexPolicyIssueSeverity.Error);
        return new VexPolicyBindingResult(false, null, null, ImmutableArray.Create(issue));
    }

    // Deserializes the raw document; a document that deserializes to null yields defaults.
    private static VexPolicyOptions Parse(string content, VexPolicyDocumentFormat format)
    {
        return format switch
        {
            VexPolicyDocumentFormat.Json => JsonSerializer.Deserialize<VexPolicyOptions>(content, JsonOptions) ?? new VexPolicyOptions(),
            VexPolicyDocumentFormat.Yaml => BuildYamlDeserializer().Deserialize<VexPolicyOptions>(content) ?? new VexPolicyOptions(),
            _ => throw new ArgumentOutOfRangeException(nameof(format), format, "Unsupported policy document format."),
        };
    }

    // Built per call: YamlDotNet deserializer instances are cheap to build and their
    // thread-safety is not guaranteed across library versions.
    private static IDeserializer BuildYamlDeserializer()
        => new DeserializerBuilder()
            .WithNamingConvention(CamelCaseNamingConvention.Instance)
            .IgnoreUnmatchedProperties()
            .Build();
}

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Vexer.Policy;
/// <summary>Produces structured policy diagnostics for CLI/WebService surfacing.</summary>
public interface IVexPolicyDiagnostics
{
    /// <summary>Builds a point-in-time report describing the currently active policy snapshot.</summary>
    VexPolicyDiagnosticsReport GetDiagnostics();
}
/// <summary>Snapshot-derived diagnostics payload surfaced to operators.</summary>
/// <param name="Version">Policy version carried by the active snapshot.</param>
/// <param name="RevisionId">Revision identifier of the snapshot.</param>
/// <param name="Digest">Digest of the normalized consensus options.</param>
/// <param name="ErrorCount">Number of error-severity issues in the snapshot.</param>
/// <param name="WarningCount">Number of warning-severity issues in the snapshot.</param>
/// <param name="GeneratedAt">UTC timestamp at which the report was generated.</param>
/// <param name="Issues">All issues carried by the snapshot.</param>
/// <param name="Recommendations">Human-readable operator guidance derived from the issues.</param>
/// <param name="ActiveOverrides">Provider weight overrides currently in effect.</param>
public sealed record VexPolicyDiagnosticsReport(
    string Version,
    string RevisionId,
    string Digest,
    int ErrorCount,
    int WarningCount,
    DateTimeOffset GeneratedAt,
    ImmutableArray<VexPolicyIssue> Issues,
    ImmutableArray<string> Recommendations,
    ImmutableDictionary<string, double> ActiveOverrides);
/// <summary>
/// Default <see cref="IVexPolicyDiagnostics"/> implementation: summarizes the active
/// policy snapshot (issue counts, overrides) and derives operator recommendations.
/// </summary>
public sealed class VexPolicyDiagnostics : IVexPolicyDiagnostics
{
    private readonly IVexPolicyProvider _policyProvider;
    private readonly TimeProvider _timeProvider;

    public VexPolicyDiagnostics(
        IVexPolicyProvider policyProvider,
        TimeProvider? timeProvider = null)
    {
        _policyProvider = policyProvider ?? throw new ArgumentNullException(nameof(policyProvider));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Builds a point-in-time diagnostics report from the current snapshot:
    /// issue counts by severity, active provider overrides, and recommendations.
    /// </summary>
    public VexPolicyDiagnosticsReport GetDiagnostics()
    {
        var snapshot = _policyProvider.GetSnapshot();
        var issues = snapshot.Issues;
        var errorCount = issues.Count(static issue => issue.Severity == VexPolicyIssueSeverity.Error);
        var warningCount = issues.Count(static issue => issue.Severity == VexPolicyIssueSeverity.Warning);

        // ImmutableDictionary enumeration is hash-ordered, so sorting before
        // ToImmutableDictionary would be ineffective; keys are ordered where rendered.
        var overrides = snapshot.ConsensusOptions.ProviderOverrides
            .ToImmutableDictionary(StringComparer.Ordinal);
        var recommendations = BuildRecommendations(errorCount, warningCount, overrides);

        return new VexPolicyDiagnosticsReport(
            snapshot.Version,
            snapshot.RevisionId,
            snapshot.Digest,
            errorCount,
            warningCount,
            _timeProvider.GetUtcNow(),
            issues,
            recommendations,
            overrides);
    }

    // Produces deterministic, human-readable guidance based on issue counts and overrides.
    private static ImmutableArray<string> BuildRecommendations(
        int errorCount,
        int warningCount,
        ImmutableDictionary<string, double> overrides)
    {
        var messages = ImmutableArray.CreateBuilder<string>();
        if (errorCount > 0)
        {
            messages.Add("Resolve policy errors before running consensus; defaults are used while errors persist.");
        }

        if (warningCount > 0)
        {
            messages.Add("Review policy warnings via CLI/Web diagnostics and adjust configuration as needed.");
        }

        if (overrides.Count > 0)
        {
            // Sort keys at render time so the recommendation text is deterministic
            // (ImmutableDictionary.Keys enumerates in hash order, not insertion order).
            var orderedKeys = overrides.Keys.OrderBy(static key => key, StringComparer.Ordinal);
            messages.Add($"Provider overrides active for: {string.Join(", ", orderedKeys)}.");
        }

        messages.Add("Refer to docs/ARCHITECTURE_VEXER.md for policy upgrade and diagnostics guidance.");
        return messages.ToImmutable();
    }
}

View File

@@ -0,0 +1,35 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Policy;
/// <summary>
/// Computes a deterministic SHA-256 digest (uppercase hex) of consensus policy options:
/// version, the five baseline weights ("F6", invariant culture), then provider overrides
/// in ordinal key order, all joined with '|' separators ('=' within each override).
/// </summary>
internal static class VexPolicyDigest
{
    public static string Compute(VexConsensusPolicyOptions options)
    {
        ArgumentNullException.ThrowIfNull(options);

        // Fixed-precision invariant formatting keeps the canonical string stable.
        static string Format(double value) => value.ToString("F6", CultureInfo.InvariantCulture);

        var parts = new List<string>
        {
            options.Version,
            Format(options.VendorWeight),
            Format(options.DistroWeight),
            Format(options.PlatformWeight),
            Format(options.HubWeight),
            Format(options.AttestationWeight),
        };

        // Ordinal key order makes the digest independent of dictionary enumeration order.
        foreach (var (key, weight) in options.ProviderOverrides.OrderBy(static pair => pair.Key, StringComparer.Ordinal))
        {
            parts.Add($"{key}={Format(weight)}");
        }

        var canonical = string.Join('|', parts);
        return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)));
    }
}

View File

@@ -0,0 +1,166 @@
using System.Collections.Immutable;
using System.Globalization;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Policy;
/// <summary>
/// Shared normalization pipeline for operator-supplied policy options: clamps weights,
/// sanitizes provider overrides, and emits a deterministically ordered issue list.
/// The ordering and issue codes feed operator-facing diagnostics, so they must stay stable.
/// </summary>
internal static class VexPolicyProcessing
{
    /// <summary>
    /// Normalizes <paramref name="options"/> (null falls back to defaults) into consensus
    /// policy options plus diagnostics sorted by severity then code.
    /// </summary>
    public static VexPolicyNormalizationResult Normalize(VexPolicyOptions? options)
    {
        var issues = ImmutableArray.CreateBuilder<VexPolicyIssue>();
        var policyOptions = options ?? new VexPolicyOptions();

        // NOTE(review): TryNormalizeWeights currently always returns true, so this
        // fallback branch appears unreachable — confirm whether false is ever intended.
        if (!TryNormalizeWeights(
            policyOptions.Weights,
            out var normalizedWeights,
            issues))
        {
            issues.Add(new VexPolicyIssue(
                "weights.invalid",
                "Weight configuration is invalid; falling back to defaults.",
                VexPolicyIssueSeverity.Warning));
            normalizedWeights = new VexConsensusPolicyOptions();
        }

        var overrides = NormalizeOverrides(policyOptions.ProviderOverrides, issues);

        // A missing version falls back to the baseline so snapshots stay comparable.
        var consensusOptions = new VexConsensusPolicyOptions(
            policyOptions.Version ?? VexConsensusPolicyOptions.BaselineVersion,
            normalizedWeights.VendorWeight,
            normalizedWeights.DistroWeight,
            normalizedWeights.PlatformWeight,
            normalizedWeights.HubWeight,
            normalizedWeights.AttestationWeight,
            overrides);

        // Deterministic ordering (errors first, then ordinal code) keeps output stable.
        var orderedIssues = issues.ToImmutable().Sort(IssueComparer);
        return new VexPolicyNormalizationResult(consensusOptions, orderedIssues);
    }

    /// <summary>Sorts issues by severity (errors first) then ordinal code for stable output.</summary>
    public static ImmutableArray<VexPolicyIssue> SortIssues(IEnumerable<VexPolicyIssue> issues)
        => issues.ToImmutableArray().Sort(IssueComparer);

    // Validates/clamps individual weights. Always returns true in the current
    // implementation: invalid values are clamped or defaulted with warnings instead
    // of failing the whole configuration.
    private static bool TryNormalizeWeights(
        VexPolicyWeightOptions? options,
        out VexConsensusPolicyOptions normalized,
        ImmutableArray<VexPolicyIssue>.Builder issues)
    {
        if (options is null)
        {
            normalized = new VexConsensusPolicyOptions();
            return true;
        }

        var hasAny =
            options.Vendor.HasValue ||
            options.Distro.HasValue ||
            options.Platform.HasValue ||
            options.Hub.HasValue ||
            options.Attestation.HasValue;

        if (!hasAny)
        {
            // No weights supplied at all: baseline defaults, no diagnostics.
            normalized = new VexConsensusPolicyOptions();
            return true;
        }

        var vendor = Clamp(options.Vendor, nameof(options.Vendor), issues);
        var distro = Clamp(options.Distro, nameof(options.Distro), issues);
        var platform = Clamp(options.Platform, nameof(options.Platform), issues);
        var hub = Clamp(options.Hub, nameof(options.Hub), issues);
        var attestation = Clamp(options.Attestation, nameof(options.Attestation), issues);

        // Unset (or NaN/Infinity-rejected) weights fall back to baseline values.
        normalized = new VexConsensusPolicyOptions(
            VexConsensusPolicyOptions.BaselineVersion,
            vendor ?? 1.0,
            distro ?? 0.9,
            platform ?? 0.7,
            hub ?? 0.5,
            attestation ?? 0.6);
        return true;
    }

    // Validates a single optional weight: null passes through; NaN/Infinity is rejected
    // with a warning (treated as unset); out-of-range values are clamped to [0, 1]
    // with a warning; in-range values pass through unchanged.
    private static double? Clamp(double? value, string fieldName, ImmutableArray<VexPolicyIssue>.Builder issues)
    {
        if (value is null)
        {
            return null;
        }

        if (double.IsNaN(value.Value) || double.IsInfinity(value.Value))
        {
            issues.Add(new VexPolicyIssue(
                $"weights.{fieldName}.invalid",
                $"{fieldName} must be a finite number.",
                VexPolicyIssueSeverity.Warning));
            return null;
        }

        if (value.Value < 0 || value.Value > 1)
        {
            issues.Add(new VexPolicyIssue(
                $"weights.{fieldName}.range",
                $"{fieldName} must be between 0 and 1; value {value.Value.ToString(CultureInfo.InvariantCulture)} was clamped.",
                VexPolicyIssueSeverity.Warning));
            return Math.Clamp(value.Value, 0, 1);
        }

        return value.Value;
    }

    // Drops overrides with blank keys (warning), trims key whitespace, clamps weights.
    // NOTE(review): when Clamp rejects NaN/Infinity it returns null, and `?? kvp.Value`
    // then reinstates the raw (non-finite) value — confirm this fallback is intended.
    private static ImmutableDictionary<string, double> NormalizeOverrides(
        IDictionary<string, double>? overrides,
        ImmutableArray<VexPolicyIssue>.Builder issues)
    {
        if (overrides is null || overrides.Count == 0)
        {
            return ImmutableDictionary<string, double>.Empty;
        }

        var builder = ImmutableDictionary.CreateBuilder<string, double>(StringComparer.Ordinal);
        foreach (var kvp in overrides)
        {
            if (string.IsNullOrWhiteSpace(kvp.Key))
            {
                issues.Add(new VexPolicyIssue(
                    "overrides.key.missing",
                    "Encountered provider override with empty key; ignoring entry.",
                    VexPolicyIssueSeverity.Warning));
                continue;
            }

            var weight = Clamp(kvp.Value, $"overrides.{kvp.Key}", issues) ?? kvp.Value;
            builder[kvp.Key.Trim()] = weight;
        }

        return builder.ToImmutable();
    }

    // Severity first (errors before warnings before the rest), then ordinal code.
    private static int CompareIssues(VexPolicyIssue left, VexPolicyIssue right)
    {
        var severityCompare = GetSeverityRank(left.Severity).CompareTo(GetSeverityRank(right.Severity));
        if (severityCompare != 0)
        {
            return severityCompare;
        }

        return string.Compare(left.Code, right.Code, StringComparison.Ordinal);
    }

    private static int GetSeverityRank(VexPolicyIssueSeverity severity)
        => severity switch
        {
            VexPolicyIssueSeverity.Error => 0,
            VexPolicyIssueSeverity.Warning => 1,
            _ => 2,
        };

    private static readonly Comparer<VexPolicyIssue> IssueComparer = Comparer<VexPolicyIssue>.Create(CompareIssues);

    /// <summary>Normalized consensus options plus deterministically ordered diagnostics.</summary>
    internal sealed record VexPolicyNormalizationResult(
        VexConsensusPolicyOptions ConsensusOptions,
        ImmutableArray<VexPolicyIssue> Issues);
}

View File

@@ -0,0 +1,24 @@
using System.Collections.Generic;
using System.Diagnostics.Metrics;
namespace StellaOps.Vexer.Policy;
/// <summary>
/// Metrics surface for the policy module: counts policy reload events on the
/// "vex.policy.reloads" counter, tagged with revision, version, and issue count.
/// </summary>
internal static class VexPolicyTelemetry
{
    private const string MeterName = "StellaOps.Vexer.Policy";
    private const string MeterVersion = "1.0.0";

    private static readonly Meter Meter = new(MeterName, MeterVersion);

    private static readonly Counter<long> PolicyReloads =
        Meter.CreateCounter<long>("vex.policy.reloads", unit: "events");

    /// <summary>Records a single policy reload event with identifying tags.</summary>
    public static void RecordReload(string revisionId, string version, int issueCount)
        => PolicyReloads.Add(
            1,
            new KeyValuePair<string, object?>("revision", revisionId),
            new KeyValuePair<string, object?>("version", version),
            new KeyValuePair<string, object?>("issues", issueCount));
}

View File

@@ -0,0 +1,122 @@
using System.Collections.Generic;
using Microsoft.Extensions.Logging.Abstractions;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Vexer.Storage.Mongo.Tests;
/// <summary>
/// Integration tests for <see cref="MongoVexCacheMaintenance"/> against an embedded
/// Mongo2Go instance: expiry-based pruning and removal of cache entries whose
/// export manifest no longer exists.
/// </summary>
public sealed class MongoVexCacheMaintenanceTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private readonly IMongoDatabase _database;

    public MongoVexCacheMaintenanceTests()
    {
        // One embedded MongoDB per test class; disposed in DisposeAsync.
        _runner = MongoDbRunner.Start();
        var client = new MongoClient(_runner.ConnectionString);
        _database = client.GetDatabase("vex-cache-maintenance-tests");
        VexMongoMappingRegistry.Register();
    }

    [Fact]
    public async Task RemoveExpiredAsync_DeletesEntriesBeforeCutoff()
    {
        var collection = _database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
        var now = DateTime.UtcNow;
        // Seed one already-expired entry and one still-valid entry.
        await collection.InsertManyAsync(new[]
        {
            new VexCacheEntryRecord
            {
                Id = "sig-1|json",
                QuerySignature = "sig-1",
                Format = "json",
                ArtifactAlgorithm = "sha256",
                ArtifactDigest = "deadbeef",
                CreatedAt = now.AddHours(-2),
                ExpiresAt = now.AddHours(-1),
            },
            new VexCacheEntryRecord
            {
                Id = "sig-2|json",
                QuerySignature = "sig-2",
                Format = "json",
                ArtifactAlgorithm = "sha256",
                ArtifactDigest = "cafebabe",
                CreatedAt = now,
                ExpiresAt = now.AddHours(1),
            },
        });

        var maintenance = new MongoVexCacheMaintenance(_database, NullLogger<MongoVexCacheMaintenance>.Instance);
        var removed = await maintenance.RemoveExpiredAsync(DateTimeOffset.UtcNow, CancellationToken.None);

        // Only the expired entry is removed; the valid one survives.
        Assert.Equal(1, removed);
        var remaining = await collection.CountDocumentsAsync(FilterDefinition<VexCacheEntryRecord>.Empty);
        Assert.Equal(1, remaining);
    }

    [Fact]
    public async Task RemoveMissingManifestReferencesAsync_DropsDanglingEntries()
    {
        var cache = _database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
        var exports = _database.GetCollection<VexExportManifestRecord>(VexMongoCollectionNames.Exports);

        // Only "manifest-existing" is present in the exports collection.
        await exports.InsertOneAsync(new VexExportManifestRecord
        {
            Id = "manifest-existing",
            QuerySignature = "sig-keep",
            Format = "json",
            CreatedAt = DateTime.UtcNow,
            ArtifactAlgorithm = "sha256",
            ArtifactDigest = "keep",
            ClaimCount = 1,
            SourceProviders = new List<string> { "vendor" },
        });

        // One cache entry references a missing manifest, the other the real one.
        await cache.InsertManyAsync(new[]
        {
            new VexCacheEntryRecord
            {
                Id = "sig-remove|json",
                QuerySignature = "sig-remove",
                Format = "json",
                ArtifactAlgorithm = "sha256",
                ArtifactDigest = "drop",
                CreatedAt = DateTime.UtcNow,
                ManifestId = "manifest-missing",
            },
            new VexCacheEntryRecord
            {
                Id = "sig-keep|json",
                QuerySignature = "sig-keep",
                Format = "json",
                ArtifactAlgorithm = "sha256",
                ArtifactDigest = "keep",
                CreatedAt = DateTime.UtcNow,
                ManifestId = "manifest-existing",
            },
        });

        var maintenance = new MongoVexCacheMaintenance(_database, NullLogger<MongoVexCacheMaintenance>.Instance);
        var removed = await maintenance.RemoveMissingManifestReferencesAsync(CancellationToken.None);

        // Only the dangling entry is dropped.
        Assert.Equal(1, removed);
        var remainingIds = await cache.Find(Builders<VexCacheEntryRecord>.Filter.Empty)
            .Project(x => x.Id)
            .ToListAsync();
        Assert.Single(remainingIds);
        Assert.Contains("sig-keep|json", remainingIds);
    }

    // No async setup required; teardown disposes the embedded Mongo instance.
    public Task InitializeAsync() => Task.CompletedTask;

    public Task DisposeAsync()
    {
        _runner.Dispose();
        return Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,206 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Text;
using Microsoft.Extensions.Options;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo.Tests;
public sealed class MongoVexRepositoryTests : IAsyncLifetime
{
private readonly MongoDbRunner _runner;
private readonly MongoClient _client;
public MongoVexRepositoryTests()
{
_runner = MongoDbRunner.Start();
_client = new MongoClient(_runner.ConnectionString);
}
[Fact]
public async Task RawStore_UsesGridFsForLargePayloads()
{
var database = _client.GetDatabase($"vex-raw-gridfs-{Guid.NewGuid():N}");
var store = CreateRawStore(database, thresholdBytes: 32);
var payload = Encoding.UTF8.GetBytes(new string('A', 256));
var document = new VexRawDocument(
"red-hat",
VexDocumentFormat.Csaf,
new Uri("https://example.com/redhat/csaf.json"),
DateTimeOffset.UtcNow,
"sha256:large",
payload,
ImmutableDictionary<string, string>.Empty);
await store.StoreAsync(document, CancellationToken.None);
var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw);
var stored = await rawCollection.Find(Builders<BsonDocument>.Filter.Eq("_id", document.Digest))
.FirstOrDefaultAsync();
Assert.NotNull(stored);
Assert.True(stored!.TryGetValue("GridFsObjectId", out var gridId));
Assert.False(gridId.IsBsonNull);
Assert.Empty(stored["Content"].AsBsonBinaryData.Bytes);
var filesCollection = database.GetCollection<BsonDocument>("vex.raw.files");
var fileCount = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
Assert.Equal(1, fileCount);
var fetched = await store.FindByDigestAsync(document.Digest, CancellationToken.None);
Assert.NotNull(fetched);
Assert.Equal(payload, fetched!.Content.ToArray());
}
[Fact]
public async Task RawStore_ReplacesGridFsWithInlinePayload()
{
var database = _client.GetDatabase($"vex-raw-inline-{Guid.NewGuid():N}");
var store = CreateRawStore(database, thresholdBytes: 16);
var largePayload = Encoding.UTF8.GetBytes(new string('B', 128));
var digest = "sha256:inline";
var largeDocument = new VexRawDocument(
"cisco",
VexDocumentFormat.CycloneDx,
new Uri("https://example.com/cyclonedx.json"),
DateTimeOffset.UtcNow,
digest,
largePayload,
ImmutableDictionary<string, string>.Empty);
await store.StoreAsync(largeDocument, CancellationToken.None);
var smallDocument = largeDocument with
{
RetrievedAt = DateTimeOffset.UtcNow.AddMinutes(1),
Content = Encoding.UTF8.GetBytes("small"),
};
await store.StoreAsync(smallDocument, CancellationToken.None);
var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw);
var stored = await rawCollection.Find(Builders<BsonDocument>.Filter.Eq("_id", digest))
.FirstOrDefaultAsync();
Assert.NotNull(stored);
Assert.True(stored!.TryGetValue("GridFsObjectId", out var gridId));
Assert.True(gridId.IsBsonNull);
Assert.Equal("small", Encoding.UTF8.GetString(stored["Content"].AsBsonBinaryData.Bytes));
var filesCollection = database.GetCollection<BsonDocument>("vex.raw.files");
var fileCount = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
Assert.Equal(0, fileCount);
}
[Fact]
public async Task ExportStore_SavesManifestAndCacheTransactionally()
{
var database = _client.GetDatabase($"vex-export-save-{Guid.NewGuid():N}");
var options = Options.Create(new VexMongoStorageOptions
{
ExportCacheTtl = TimeSpan.FromHours(6),
GridFsInlineThresholdBytes = 64,
});
var store = new MongoVexExportStore(_client, database, options);
var signature = new VexQuerySignature("format=csaf|provider=redhat");
var manifest = new VexExportManifest(
"exports/20251016/redhat",
signature,
VexExportFormat.Csaf,
DateTimeOffset.UtcNow,
new VexContentAddress("sha256", "abcdef123456"),
claimCount: 5,
sourceProviders: new[] { "red-hat" },
fromCache: false,
consensusRevision: "rev-1",
attestation: null,
sizeBytes: 1024);
await store.SaveAsync(manifest, CancellationToken.None);
var exportsCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Exports);
var exportKey = BuildExportKey(signature, VexExportFormat.Csaf);
var exportDoc = await exportsCollection.Find(Builders<BsonDocument>.Filter.Eq("_id", exportKey))
.FirstOrDefaultAsync();
Assert.NotNull(exportDoc);
var cacheCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Cache);
var cacheKey = BuildExportKey(signature, VexExportFormat.Csaf);
var cacheDoc = await cacheCollection.Find(Builders<BsonDocument>.Filter.Eq("_id", cacheKey))
.FirstOrDefaultAsync();
Assert.NotNull(cacheDoc);
Assert.Equal(manifest.ExportId, cacheDoc!["ManifestId"].AsString);
Assert.True(cacheDoc.TryGetValue("ExpiresAt", out var expiresValue));
Assert.False(expiresValue.IsBsonNull);
}
[Fact]
public async Task ExportStore_FindAsync_ExpiresCacheEntries()
{
var database = _client.GetDatabase($"vex-export-expire-{Guid.NewGuid():N}");
var options = Options.Create(new VexMongoStorageOptions
{
ExportCacheTtl = TimeSpan.FromMinutes(5),
GridFsInlineThresholdBytes = 64,
});
var store = new MongoVexExportStore(_client, database, options);
var signature = new VexQuerySignature("format=json|provider=cisco");
var manifest = new VexExportManifest(
"exports/20251016/cisco",
signature,
VexExportFormat.Json,
DateTimeOffset.UtcNow,
new VexContentAddress("sha256", "deadbeef"),
claimCount: 3,
sourceProviders: new[] { "cisco" },
fromCache: false,
consensusRevision: "rev-2",
attestation: null,
sizeBytes: 2048);
await store.SaveAsync(manifest, CancellationToken.None);
var cacheCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Cache);
var cacheId = BuildExportKey(signature, VexExportFormat.Json);
var update = Builders<BsonDocument>.Update.Set("ExpiresAt", DateTime.UtcNow.AddMinutes(-10));
await cacheCollection.UpdateOneAsync(Builders<BsonDocument>.Filter.Eq("_id", cacheId), update);
var cached = await store.FindAsync(signature, VexExportFormat.Json, CancellationToken.None);
Assert.Null(cached);
var remaining = await cacheCollection.Find(Builders<BsonDocument>.Filter.Eq("_id", cacheId))
.FirstOrDefaultAsync();
Assert.Null(remaining);
}
private MongoVexRawStore CreateRawStore(IMongoDatabase database, int thresholdBytes)
{
var options = Options.Create(new VexMongoStorageOptions
{
RawBucketName = "vex.raw",
GridFsInlineThresholdBytes = thresholdBytes,
ExportCacheTtl = TimeSpan.FromHours(1),
});
return new MongoVexRawStore(_client, database, options);
}
private static string BuildExportKey(VexQuerySignature signature, VexExportFormat format)
=> string.Format(CultureInfo.InvariantCulture, "{0}|{1}", signature.Value, format.ToString().ToLowerInvariant());
public Task InitializeAsync() => Task.CompletedTask;
// Tears down the embedded Mongo runner backing these tests once the class completes.
public Task DisposeAsync()
{
    _runner.Dispose();
    return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,242 @@
using System.Globalization;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo.Tests;
/// <summary>
/// Verifies that the Vexer Mongo record mappings round-trip domain objects and
/// tolerate unknown fields in stored documents (forward compatibility with
/// documents written by newer schema versions).
/// </summary>
public sealed class MongoVexStoreMappingTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private readonly IMongoDatabase _database;

    public MongoVexStoreMappingTests()
    {
        // Each test class instance gets its own embedded MongoDB server.
        _runner = MongoDbRunner.Start();
        var client = new MongoClient(_runner.ConnectionString);
        _database = client.GetDatabase("vexer-storage-mapping-tests");
        VexMongoMappingRegistry.Register();
    }

    [Fact]
    public async Task ProviderStore_RoundTrips_WithExtraFields()
    {
        // Insert a raw BSON provider document containing fields the mapping does
        // not know about ("UnsupportedField", "Unexpected", "UnexpectedRoot", ...).
        var providers = _database.GetCollection<BsonDocument>(VexMongoCollectionNames.Providers);
        var providerId = "red-hat";
        var document = new BsonDocument
        {
            { "_id", providerId },
            { "DisplayName", "Red Hat CSAF" },
            { "Kind", "vendor" },
            { "BaseUris", new BsonArray { "https://example.com/csaf" } },
            {
                "Discovery",
                new BsonDocument
                {
                    { "WellKnownMetadata", "https://example.com/.well-known/csaf" },
                    { "RolIeService", "https://example.com/service/rolie" },
                    { "UnsupportedField", "ignored" },
                }
            },
            {
                "Trust",
                new BsonDocument
                {
                    { "Weight", 0.75 },
                    {
                        "Cosign",
                        new BsonDocument
                        {
                            { "Issuer", "issuer@example.com" },
                            { "IdentityPattern", "spiffe://example/*" },
                            { "Unexpected", true },
                        }
                    },
                    { "PgpFingerprints", new BsonArray { "ABCDEF1234567890" } },
                    { "AnotherIgnoredField", 123 },
                }
            },
            { "Enabled", true },
            { "UnexpectedRoot", new BsonDocument { { "flag", true } } },
        };
        await providers.InsertOneAsync(document);

        // Reading through the typed store must succeed and preserve all known fields.
        var store = new MongoVexProviderStore(_database);
        var result = await store.FindAsync(providerId, CancellationToken.None);
        Assert.NotNull(result);
        Assert.Equal(providerId, result!.Id);
        Assert.Equal("Red Hat CSAF", result.DisplayName);
        Assert.Equal(VexProviderKind.Vendor, result.Kind);
        Assert.Single(result.BaseUris);
        Assert.Equal("https://example.com/csaf", result.BaseUris[0].ToString());
        Assert.Equal("https://example.com/.well-known/csaf", result.Discovery.WellKnownMetadata?.ToString());
        Assert.Equal("https://example.com/service/rolie", result.Discovery.RolIeService?.ToString());
        Assert.Equal(0.75, result.Trust.Weight);
        Assert.NotNull(result.Trust.Cosign);
        Assert.Equal("issuer@example.com", result.Trust.Cosign!.Issuer);
        Assert.Equal("spiffe://example/*", result.Trust.Cosign!.IdentityPattern);
        Assert.Contains("ABCDEF1234567890", result.Trust.PgpFingerprints);
        Assert.True(result.Enabled);
    }

    [Fact]
    public async Task ConsensusStore_IgnoresUnknownFields()
    {
        // Consensus documents carry nested sources/conflicts; sprinkle unknown
        // fields at every level to prove deserialization skips them.
        var consensus = _database.GetCollection<BsonDocument>(VexMongoCollectionNames.Consensus);
        var vulnerabilityId = "CVE-2025-12345";
        var productKey = "pkg:maven/org.example/app@1.2.3";
        // Composite _id mirrors the store's convention: "<vulnerabilityId>|<productKey>".
        var consensusId = string.Format(CultureInfo.InvariantCulture, "{0}|{1}", vulnerabilityId.Trim(), productKey.Trim());
        var document = new BsonDocument
        {
            { "_id", consensusId },
            { "VulnerabilityId", vulnerabilityId },
            {
                "Product",
                new BsonDocument
                {
                    { "Key", productKey },
                    { "Name", "Example App" },
                    { "Version", "1.2.3" },
                    { "Purl", productKey },
                    { "Extra", "ignored" },
                }
            },
            { "Status", "notaffected" },
            { "CalculatedAt", DateTime.UtcNow },
            {
                "Sources",
                new BsonArray
                {
                    new BsonDocument
                    {
                        { "ProviderId", "red-hat" },
                        { "Status", "notaffected" },
                        { "DocumentDigest", "sha256:123" },
                        { "Weight", 0.9 },
                        { "Justification", "componentnotpresent" },
                        { "Detail", "Vendor statement" },
                        {
                            "Confidence",
                            new BsonDocument
                            {
                                { "Level", "high" },
                                { "Score", 0.7 },
                                { "Method", "review" },
                                { "Unexpected", "ignored" },
                            }
                        },
                        { "UnknownField", true },
                    },
                }
            },
            {
                "Conflicts",
                new BsonArray
                {
                    new BsonDocument
                    {
                        { "ProviderId", "cisco" },
                        { "Status", "affected" },
                        { "DocumentDigest", "sha256:999" },
                        { "Justification", "requiresconfiguration" },
                        { "Detail", "Different guidance" },
                        { "Reason", "policy_override" },
                        { "Other", 1 },
                    },
                }
            },
            { "PolicyVersion", "2025.10" },
            { "PolicyRevisionId", "rev-1" },
            { "PolicyDigest", "sha256:abc" },
            { "Summary", "Vendor confirms not affected." },
            { "Unexpected", new BsonDocument { { "foo", "bar" } } },
        };
        await consensus.InsertOneAsync(document);

        var store = new MongoVexConsensusStore(_database);
        var result = await store.FindAsync(vulnerabilityId, productKey, CancellationToken.None);
        Assert.NotNull(result);
        Assert.Equal(vulnerabilityId, result!.VulnerabilityId);
        Assert.Equal(productKey, result.Product.Key);
        Assert.Equal("Example App", result.Product.Name);
        Assert.Equal(VexConsensusStatus.NotAffected, result.Status);
        Assert.Single(result.Sources);
        var source = result.Sources[0];
        Assert.Equal("red-hat", source.ProviderId);
        Assert.Equal(VexClaimStatus.NotAffected, source.Status);
        Assert.Equal("sha256:123", source.DocumentDigest);
        Assert.Equal(0.9, source.Weight);
        Assert.Equal(VexJustification.ComponentNotPresent, source.Justification);
        Assert.NotNull(source.Confidence);
        Assert.Equal("high", source.Confidence!.Level);
        Assert.Equal(0.7, source.Confidence!.Score);
        Assert.Equal("review", source.Confidence!.Method);
        Assert.Single(result.Conflicts);
        var conflict = result.Conflicts[0];
        Assert.Equal("cisco", conflict.ProviderId);
        Assert.Equal(VexClaimStatus.Affected, conflict.Status);
        Assert.Equal(VexJustification.RequiresConfiguration, conflict.Justification);
        Assert.Equal("policy_override", conflict.Reason);
        Assert.Equal("Vendor confirms not affected.", result.Summary);
        Assert.Equal("2025.10", result.PolicyVersion);
    }

    [Fact]
    public async Task CacheIndex_RoundTripsGridFsMetadata()
    {
        // A cache entry referencing a GridFS object must survive a save/find
        // round trip even after an unknown field is patched into the document.
        var gridObjectId = ObjectId.GenerateNewId().ToString();
        var index = new MongoVexCacheIndex(_database);
        var signature = new VexQuerySignature("format=csaf|vendor=redhat");
        var now = DateTimeOffset.UtcNow;
        var expires = now.AddHours(12);
        var entry = new VexCacheEntry(
            signature,
            VexExportFormat.Csaf,
            new VexContentAddress("sha256", "abcdef123456"),
            now,
            sizeBytes: 1024,
            manifestId: "manifest-001",
            gridFsObjectId: gridObjectId,
            expiresAt: expires);
        await index.SaveAsync(entry, CancellationToken.None);

        // Cache ids follow "<signature>|<lowercase format>".
        var cacheId = string.Format(
            CultureInfo.InvariantCulture,
            "{0}|{1}",
            signature.Value,
            entry.Format.ToString().ToLowerInvariant());
        var cache = _database.GetCollection<BsonDocument>(VexMongoCollectionNames.Cache);
        var filter = Builders<BsonDocument>.Filter.Eq("_id", cacheId);
        var update = Builders<BsonDocument>.Update.Set("UnexpectedField", true);
        await cache.UpdateOneAsync(filter, update);

        var roundTrip = await index.FindAsync(signature, VexExportFormat.Csaf, CancellationToken.None);
        Assert.NotNull(roundTrip);
        Assert.Equal(entry.QuerySignature.Value, roundTrip!.QuerySignature.Value);
        Assert.Equal(entry.Format, roundTrip.Format);
        Assert.Equal(entry.Artifact.Digest, roundTrip.Artifact.Digest);
        Assert.Equal(entry.ManifestId, roundTrip.ManifestId);
        Assert.Equal(entry.GridFsObjectId, roundTrip.GridFsObjectId);
        Assert.Equal(entry.SizeBytes, roundTrip.SizeBytes);
        Assert.NotNull(roundTrip.ExpiresAt);
        // Compare at millisecond precision; BSON dates do not preserve full ticks.
        Assert.Equal(expires.ToUnixTimeMilliseconds(), roundTrip.ExpiresAt!.Value.ToUnixTimeMilliseconds());
    }

    // xUnit async-lifetime hook; no asynchronous setup is required.
    public Task InitializeAsync() => Task.CompletedTask;

    // Tears down the embedded Mongo runner once the test class completes.
    public Task DisposeAsync()
    {
        _runner.Dispose();
        return Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,15 @@
<!-- Test project for the Vexer Mongo storage layer. -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <!-- NOTE(review): the test sources use xunit, Mongo2Go, and Microsoft.Extensions.*
         but no PackageReference items appear here; presumably they are injected via
         Directory.Build.props / central package management. Confirm before relying on
         a standalone build of this project. -->
    <ProjectReference Include="..\StellaOps.Vexer.Storage.Mongo\StellaOps.Vexer.Storage.Mongo.csproj" />
    <ProjectReference Include="..\StellaOps.Vexer.Core\StellaOps.Vexer.Core.csproj" />
    <ProjectReference Include="..\StellaOps.Vexer.Policy\StellaOps.Vexer.Policy.csproj" />
    <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,59 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Mongo2Go;
using MongoDB.Driver;
using StellaOps.Vexer.Storage.Mongo.Migrations;
namespace StellaOps.Vexer.Storage.Mongo.Tests;
/// <summary>
/// Exercises the migration runner against an embedded MongoDB instance and
/// checks that the initial index migration is applied exactly once.
/// </summary>
public sealed class VexMongoMigrationRunnerTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private readonly IMongoDatabase _database;

    public VexMongoMigrationRunnerTests()
    {
        _runner = MongoDbRunner.Start();
        _database = new MongoClient(_runner.ConnectionString).GetDatabase("vexer-migrations-tests");
    }

    [Fact]
    public async Task RunAsync_AppliesInitialIndexesOnce()
    {
        var migration = new VexInitialIndexMigration();
        var runner = new VexMongoMigrationRunner(_database, new[] { migration }, NullLogger<VexMongoMigrationRunner>.Instance);

        // Running twice must be idempotent: exactly one ledger record is written.
        await runner.RunAsync(CancellationToken.None);
        await runner.RunAsync(CancellationToken.None);

        var ledger = _database.GetCollection<VexMigrationRecord>(VexMongoCollectionNames.Migrations);
        var records = await ledger.Find(FilterDefinition<VexMigrationRecord>.Empty).ToListAsync();
        Assert.Single(records);
        Assert.Equal(migration.Id, records[0].Id);

        // Every index declared by the initial migration must exist under its
        // driver-generated conventional name.
        AssertIndex<VexRawDocumentRecord>(VexMongoCollectionNames.Raw, "ProviderId_1_Format_1_RetrievedAt_1");
        AssertIndex<VexProviderRecord>(VexMongoCollectionNames.Providers, "Kind_1");
        AssertIndex<VexConsensusRecord>(VexMongoCollectionNames.Consensus, "VulnerabilityId_1_Product.Key_1");
        AssertIndex<VexConsensusRecord>(VexMongoCollectionNames.Consensus, "PolicyRevisionId_1_PolicyDigest_1");
        AssertIndex<VexExportManifestRecord>(VexMongoCollectionNames.Exports, "QuerySignature_1_Format_1");
        AssertIndex<VexCacheEntryRecord>(VexMongoCollectionNames.Cache, "QuerySignature_1_Format_1");
        AssertIndex<VexCacheEntryRecord>(VexMongoCollectionNames.Cache, "ExpiresAt_1");
    }

    // Asserts that the named index exists on the given collection.
    private void AssertIndex<TDocument>(string collectionName, string indexName)
    {
        var collection = _database.GetCollection<TDocument>(collectionName);
        var found = false;
        foreach (var index in collection.Indexes.List().ToList())
        {
            if (string.Equals(index["name"].AsString, indexName, StringComparison.Ordinal))
            {
                found = true;
                break;
            }
        }

        Assert.True(found);
    }

    // xUnit async-lifetime hook; no asynchronous setup is required.
    public Task InitializeAsync() => Task.CompletedTask;

    // Stops the embedded Mongo runner once the test class completes.
    public Task DisposeAsync()
    {
        _runner.Dispose();
        return Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,41 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Persistence abstraction for VEX provider registrations.
/// </summary>
public interface IVexProviderStore
{
    /// <summary>Returns the provider with the given id, or <c>null</c> when none is stored.</summary>
    ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken);

    /// <summary>Returns all stored providers.</summary>
    ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken);

    /// <summary>Inserts or replaces the given provider.</summary>
    ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken);
}
/// <summary>
/// Persistence abstraction for computed VEX consensus documents, keyed by
/// (vulnerability id, product key).
/// </summary>
public interface IVexConsensusStore
{
    /// <summary>Returns the consensus for the vulnerability/product pair, or <c>null</c> when absent.</summary>
    ValueTask<VexConsensus?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken);

    /// <summary>Returns every stored consensus for the given vulnerability across all products.</summary>
    ValueTask<IReadOnlyCollection<VexConsensus>> FindByVulnerabilityAsync(string vulnerabilityId, CancellationToken cancellationToken);

    /// <summary>Inserts or replaces the given consensus.</summary>
    ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken);
}
/// <summary>
/// Index over cached export artifacts, keyed by query signature and export format.
/// </summary>
public interface IVexCacheIndex
{
    /// <summary>Returns the cache entry for the signature/format pair, or <c>null</c> when absent.</summary>
    ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken);

    /// <summary>Inserts or replaces the given cache entry.</summary>
    ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken);

    /// <summary>Removes the cache entry for the signature/format pair, if present.</summary>
    ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken);
}
/// <summary>
/// Housekeeping operations over the export cache collection.
/// </summary>
public interface IVexCacheMaintenance
{
    /// <summary>Removes entries whose expiry precedes <paramref name="asOf"/>; returns the count removed.</summary>
    ValueTask<int> RemoveExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken);

    /// <summary>Removes entries referencing export manifests that no longer exist; returns the count removed.</summary>
    ValueTask<int> RemoveMissingManifestReferencesAsync(CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,12 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
namespace StellaOps.Vexer.Storage.Mongo.Migrations;
/// <summary>
/// A one-shot schema migration applied by <c>VexMongoMigrationRunner</c>;
/// migrations are ordered and deduplicated by <see cref="Id"/>.
/// </summary>
internal interface IVexMongoMigration
{
    /// <summary>Unique, stable migration identifier used for ordering and the applied-migrations ledger.</summary>
    string Id { get; }

    /// <summary>Applies the migration against the given database.</summary>
    ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,75 @@
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
namespace StellaOps.Vexer.Storage.Mongo.Migrations;
/// <summary>
/// Initial schema migration: creates the baseline indexes for the Vexer storage
/// collections (raw documents, providers, consensus, exports, export cache).
/// </summary>
internal sealed class VexInitialIndexMigration : IVexMongoMigration
{
    // Date-prefixed id so the runner's ordinal ordering matches chronology.
    public string Id => "20251016-initial-indexes";

    public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(database);
        await EnsureRawIndexesAsync(database, cancellationToken).ConfigureAwait(false);
        await EnsureProviderIndexesAsync(database, cancellationToken).ConfigureAwait(false);
        await EnsureConsensusIndexesAsync(database, cancellationToken).ConfigureAwait(false);
        await EnsureExportIndexesAsync(database, cancellationToken).ConfigureAwait(false);
        await EnsureCacheIndexesAsync(database, cancellationToken).ConfigureAwait(false);
    }

    // Raw documents: compound lookup index over provider, format, and retrieval time.
    private static Task EnsureRawIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collection = database.GetCollection<VexRawDocumentRecord>(VexMongoCollectionNames.Raw);
        var providerFormatIndex = Builders<VexRawDocumentRecord>.IndexKeys
            .Ascending(x => x.ProviderId)
            .Ascending(x => x.Format)
            .Ascending(x => x.RetrievedAt);
        return collection.Indexes.CreateOneAsync(new CreateIndexModel<VexRawDocumentRecord>(providerFormatIndex), cancellationToken: cancellationToken);
    }

    // Providers: simple index on the provider kind.
    private static Task EnsureProviderIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collection = database.GetCollection<VexProviderRecord>(VexMongoCollectionNames.Providers);
        var kindIndex = Builders<VexProviderRecord>.IndexKeys.Ascending(x => x.Kind);
        return collection.Indexes.CreateOneAsync(new CreateIndexModel<VexProviderRecord>(kindIndex), cancellationToken: cancellationToken);
    }

    // Consensus: unique (vulnerability, product key) pair plus a policy-revision lookup index.
    private static Task EnsureConsensusIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collection = database.GetCollection<VexConsensusRecord>(VexMongoCollectionNames.Consensus);
        var vulnProductIndex = Builders<VexConsensusRecord>.IndexKeys
            .Ascending(x => x.VulnerabilityId)
            .Ascending(x => x.Product.Key);
        var policyIndex = Builders<VexConsensusRecord>.IndexKeys
            .Ascending(x => x.PolicyRevisionId)
            .Ascending(x => x.PolicyDigest);
        return Task.WhenAll(
            collection.Indexes.CreateOneAsync(new CreateIndexModel<VexConsensusRecord>(vulnProductIndex, new CreateIndexOptions { Unique = true }), cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(new CreateIndexModel<VexConsensusRecord>(policyIndex), cancellationToken: cancellationToken));
    }

    // Exports: unique (query signature, format) pair.
    private static Task EnsureExportIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collection = database.GetCollection<VexExportManifestRecord>(VexMongoCollectionNames.Exports);
        var signatureIndex = Builders<VexExportManifestRecord>.IndexKeys
            .Ascending(x => x.QuerySignature)
            .Ascending(x => x.Format);
        return collection.Indexes.CreateOneAsync(new CreateIndexModel<VexExportManifestRecord>(signatureIndex, new CreateIndexOptions { Unique = true }), cancellationToken: cancellationToken);
    }

    // Cache: unique (query signature, format) pair plus a TTL index.
    private static Task EnsureCacheIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        var collection = database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
        var signatureIndex = Builders<VexCacheEntryRecord>.IndexKeys
            .Ascending(x => x.QuerySignature)
            .Ascending(x => x.Format);
        var expirationIndex = Builders<VexCacheEntryRecord>.IndexKeys.Ascending(x => x.ExpiresAt);
        // ExpireAfter = 0 makes ExpiresAt a per-document TTL: Mongo's TTL monitor
        // deletes each cache entry once its stored ExpiresAt timestamp has passed.
        return Task.WhenAll(
            collection.Indexes.CreateOneAsync(new CreateIndexModel<VexCacheEntryRecord>(signatureIndex, new CreateIndexOptions { Unique = true }), cancellationToken: cancellationToken),
            collection.Indexes.CreateOneAsync(new CreateIndexModel<VexCacheEntryRecord>(expirationIndex, new CreateIndexOptions { ExpireAfter = TimeSpan.FromSeconds(0) }), cancellationToken: cancellationToken));
    }
}

View File

@@ -0,0 +1,18 @@
using System;
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.Vexer.Storage.Mongo.Migrations;
/// <summary>
/// Ledger entry recording that a Vexer Mongo migration has been applied.
/// </summary>
internal sealed class VexMigrationRecord
{
    public VexMigrationRecord(string id, DateTimeOffset executedAt)
    {
        // Reject blank ids up front; the trimmed id doubles as the Mongo _id.
        Id = string.IsNullOrWhiteSpace(id) ? throw new ArgumentException("Migration id must be provided.", nameof(id)) : id.Trim();
        ExecutedAt = executedAt;
    }

    /// <summary>Migration identifier; persisted as the document _id.</summary>
    [BsonId]
    public string Id { get; }

    /// <summary>Timestamp recorded when the migration was applied.</summary>
    public DateTimeOffset ExecutedAt { get; }
}

View File

@@ -0,0 +1,22 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;
namespace StellaOps.Vexer.Storage.Mongo.Migrations;
/// <summary>
/// Runs all pending Vexer Mongo migrations when the host starts; no work on shutdown.
/// </summary>
internal sealed class VexMongoMigrationHostedService : IHostedService
{
    private readonly VexMongoMigrationRunner _migrationRunner;

    public VexMongoMigrationHostedService(VexMongoMigrationRunner runner)
        => _migrationRunner = runner ?? throw new ArgumentNullException(nameof(runner));

    public async Task StartAsync(CancellationToken cancellationToken)
        => await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false);

    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}

View File

@@ -0,0 +1,74 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
namespace StellaOps.Vexer.Storage.Mongo.Migrations;
/// <summary>
/// Applies Vexer Mongo migrations in deterministic (ordinal id) order, recording
/// each completed migration in a ledger collection so it runs at most once.
/// </summary>
internal sealed class VexMongoMigrationRunner
{
    private readonly IMongoDatabase _database;
    private readonly IReadOnlyList<IVexMongoMigration> _migrations;
    private readonly ILogger<VexMongoMigrationRunner> _logger;

    public VexMongoMigrationRunner(
        IMongoDatabase database,
        IEnumerable<IVexMongoMigration> migrations,
        ILogger<VexMongoMigrationRunner> logger)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(migrations);
        ArgumentNullException.ThrowIfNull(logger);

        _database = database;
        _logger = logger;
        // Ordinal sort keeps execution order stable regardless of registration order.
        _migrations = migrations.OrderBy(static migration => migration.Id, StringComparer.Ordinal).ToArray();
    }

    /// <summary>Executes every migration not yet present in the ledger.</summary>
    public async ValueTask RunAsync(CancellationToken cancellationToken)
    {
        if (_migrations.Count == 0)
        {
            return;
        }

        var ledger = _database.GetCollection<VexMigrationRecord>(VexMongoCollectionNames.Migrations);
        await EnsureMigrationsIndexAsync(ledger, cancellationToken).ConfigureAwait(false);
        var applied = await LoadAppliedMigrationsAsync(ledger, cancellationToken).ConfigureAwait(false);

        foreach (var migration in _migrations.Where(migration => !applied.Contains(migration.Id)))
        {
            _logger.LogInformation("Applying Vexer Mongo migration {MigrationId}", migration.Id);
            await migration.ExecuteAsync(_database, cancellationToken).ConfigureAwait(false);
            await ledger
                .InsertOneAsync(new VexMigrationRecord(migration.Id, DateTimeOffset.UtcNow), cancellationToken: cancellationToken)
                .ConfigureAwait(false);
            _logger.LogInformation("Completed Vexer Mongo migration {MigrationId}", migration.Id);
        }
    }

    private static ValueTask EnsureMigrationsIndexAsync(
        IMongoCollection<VexMigrationRecord> collection,
        CancellationToken cancellationToken)
    {
        // The default _id index already enforces migration-id uniqueness,
        // so no additional index is required.
        return ValueTask.CompletedTask;
    }

    // Loads the ids of every previously applied migration from the ledger.
    private static async ValueTask<HashSet<string>> LoadAppliedMigrationsAsync(
        IMongoCollection<VexMigrationRecord> collection,
        CancellationToken cancellationToken)
    {
        var appliedIds = new HashSet<string>(StringComparer.Ordinal);
        var records = await collection.Find(FilterDefinition<VexMigrationRecord>.Empty)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        foreach (var record in records)
        {
            appliedIds.Add(record.Id);
        }

        return appliedIds;
    }
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Mongo-backed implementation of <see cref="IVexCacheIndex"/>; entries are
/// keyed by a deterministic id derived from the query signature and format.
/// </summary>
public sealed class MongoVexCacheIndex : IVexCacheIndex
{
    private readonly IMongoCollection<VexCacheEntryRecord> _entries;

    public MongoVexCacheIndex(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        VexMongoMappingRegistry.Register();
        _entries = database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
    }

    /// <summary>Looks up a cache entry by signature/format; null when not stored.</summary>
    public async ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(signature);

        var record = await _entries
            .Find(FilterById(signature, format))
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return record is null ? null : record.ToDomain();
    }

    /// <summary>Upserts the cache entry under its deterministic id.</summary>
    public async ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(entry);

        var record = VexCacheEntryRecord.FromDomain(entry);
        await _entries
            .ReplaceOneAsync(
                Builders<VexCacheEntryRecord>.Filter.Eq(x => x.Id, record.Id),
                record,
                new ReplaceOptions { IsUpsert = true },
                cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Deletes the cache entry for the signature/format pair, if present.</summary>
    public async ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(signature);
        await _entries.DeleteOneAsync(FilterById(signature, format), cancellationToken).ConfigureAwait(false);
    }

    // Shared _id filter for the signature/format composite key.
    private static FilterDefinition<VexCacheEntryRecord> FilterById(VexQuerySignature signature, VexExportFormat format)
        => Builders<VexCacheEntryRecord>.Filter.Eq(x => x.Id, VexCacheEntryRecord.CreateId(signature, format));
}

View File

@@ -0,0 +1,85 @@
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Housekeeping over the export cache: prunes expired entries and entries whose
/// manifest reference no longer resolves to a stored export manifest.
/// </summary>
internal sealed class MongoVexCacheMaintenance : IVexCacheMaintenance
{
    private readonly IMongoCollection<VexCacheEntryRecord> _cache;
    private readonly IMongoCollection<VexExportManifestRecord> _exports;
    private readonly ILogger<MongoVexCacheMaintenance> _logger;

    public MongoVexCacheMaintenance(
        IMongoDatabase database,
        ILogger<MongoVexCacheMaintenance> logger)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(logger);
        VexMongoMappingRegistry.Register();
        _cache = database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
        _exports = database.GetCollection<VexExportManifestRecord>(VexMongoCollectionNames.Exports);
        _logger = logger;
    }

    /// <summary>
    /// Deletes cache entries whose <c>ExpiresAt</c> lies strictly before <paramref name="asOf"/>.
    /// </summary>
    /// <returns>The number of entries removed.</returns>
    public async ValueTask<int> RemoveExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken)
    {
        var cutoff = asOf.UtcDateTime;
        var filter = Builders<VexCacheEntryRecord>.Filter.Lt(x => x.ExpiresAt, cutoff);
        var result = await _cache.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false);
        var removed = (int)result.DeletedCount;
        if (removed > 0)
        {
            _logger.LogInformation("Pruned {Count} expired VEX export cache entries (cutoff {Cutoff})", removed, cutoff);
        }

        return removed;
    }

    /// <summary>
    /// Deletes cache entries whose <c>ManifestId</c> references a manifest that no
    /// longer exists in the exports collection.
    /// </summary>
    /// <returns>The number of dangling entries removed.</returns>
    public async ValueTask<int> RemoveMissingManifestReferencesAsync(CancellationToken cancellationToken)
    {
        var filter = Builders<VexCacheEntryRecord>.Filter.Ne(x => x.ManifestId, null);
        var entries = await _cache.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false);
        if (entries.Count == 0)
        {
            return 0;
        }

        // Resolve every referenced manifest with a single batched query instead of
        // one exports lookup per cache entry (previously O(n) round trips).
        var manifestIds = entries
            .Where(static entry => !string.IsNullOrWhiteSpace(entry.ManifestId))
            .Select(static entry => entry.ManifestId!)
            .Distinct(StringComparer.Ordinal)
            .ToList();
        if (manifestIds.Count == 0)
        {
            return 0;
        }

        var existingIds = await _exports
            .Find(Builders<VexExportManifestRecord>.Filter.In(x => x.Id, manifestIds))
            .Project(x => x.Id)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        var existing = existingIds.ToHashSet(StringComparer.Ordinal);

        var danglingIds = entries
            .Where(entry => !string.IsNullOrWhiteSpace(entry.ManifestId) && !existing.Contains(entry.ManifestId!))
            .Select(static entry => entry.Id)
            .ToList();
        if (danglingIds.Count == 0)
        {
            return 0;
        }

        var danglingFilter = Builders<VexCacheEntryRecord>.Filter.In(x => x.Id, danglingIds);
        var result = await _cache.DeleteManyAsync(danglingFilter, cancellationToken).ConfigureAwait(false);
        var removed = (int)result.DeletedCount;
        _logger.LogWarning("Removed {Count} cache entries referencing missing export manifests.", removed);
        return removed;
    }
}

View File

@@ -0,0 +1,46 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Mongo-backed implementation of <see cref="IVexConsensusStore"/>; documents are
/// keyed by a composite id derived from vulnerability id and product key.
/// </summary>
public sealed class MongoVexConsensusStore : IVexConsensusStore
{
    private readonly IMongoCollection<VexConsensusRecord> _consensus;

    public MongoVexConsensusStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        VexMongoMappingRegistry.Register();
        _consensus = database.GetCollection<VexConsensusRecord>(VexMongoCollectionNames.Consensus);
    }

    /// <summary>Finds the consensus for the vulnerability/product pair; null when absent.</summary>
    public async ValueTask<VexConsensus?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        ArgumentException.ThrowIfNullOrWhiteSpace(productKey);

        var documentId = VexConsensusRecord.CreateId(vulnerabilityId, productKey);
        var record = await _consensus
            .Find(Builders<VexConsensusRecord>.Filter.Eq(x => x.Id, documentId))
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return record is null ? null : record.ToDomain();
    }

    /// <summary>Returns every consensus stored for the given vulnerability.</summary>
    public async ValueTask<IReadOnlyCollection<VexConsensus>> FindByVulnerabilityAsync(string vulnerabilityId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);

        var filter = Builders<VexConsensusRecord>.Filter.Eq(x => x.VulnerabilityId, vulnerabilityId.Trim());
        var records = await _consensus.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false);

        var results = new List<VexConsensus>(records.Count);
        foreach (var record in records)
        {
            results.Add(record.ToDomain());
        }

        return results;
    }

    /// <summary>Upserts the consensus document under its composite id.</summary>
    public async ValueTask SaveAsync(VexConsensus consensus, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(consensus);

        var record = VexConsensusRecord.FromDomain(consensus);
        await _consensus
            .ReplaceOneAsync(
                Builders<VexConsensusRecord>.Filter.Eq(x => x.Id, record.Id),
                record,
                new ReplaceOptions { IsUpsert = true },
                cancellationToken)
            .ConfigureAwait(false);
    }
}

View File

@@ -1,46 +1,150 @@
using System;
using System.ComponentModel.DataAnnotations;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using MongoDB.Driver.Core.Clusters;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
public sealed class MongoVexExportStore : IVexExportStore
{
private readonly IMongoCollection<VexExportManifestRecord> _collection;
private readonly IMongoClient _client;
private readonly IMongoCollection<VexExportManifestRecord> _exports;
private readonly IMongoCollection<VexCacheEntryRecord> _cache;
private readonly VexMongoStorageOptions _options;
public MongoVexExportStore(IMongoDatabase database)
public MongoVexExportStore(
IMongoClient client,
IMongoDatabase database,
IOptions<VexMongoStorageOptions> options)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
ArgumentNullException.ThrowIfNull(database);
ArgumentNullException.ThrowIfNull(options);
_options = options.Value;
Validator.ValidateObject(_options, new ValidationContext(_options), validateAllProperties: true);
VexMongoMappingRegistry.Register();
_collection = database.GetCollection<VexExportManifestRecord>(VexMongoCollectionNames.Exports);
EnsureIndexes(_collection);
_exports = database.GetCollection<VexExportManifestRecord>(VexMongoCollectionNames.Exports);
_cache = database.GetCollection<VexCacheEntryRecord>(VexMongoCollectionNames.Cache);
}
public async ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(signature);
var id = VexExportManifestRecord.CreateId(signature, format);
var filter = Builders<VexExportManifestRecord>.Filter.Eq(x => x.Id, id);
var entity = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return entity?.ToDomain();
var cacheId = VexCacheEntryRecord.CreateId(signature, format);
var cacheFilter = Builders<VexCacheEntryRecord>.Filter.Eq(x => x.Id, cacheId);
var cacheRecord = await _cache.Find(cacheFilter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
if (cacheRecord is null)
{
return null;
}
if (cacheRecord.ExpiresAt is DateTime expiresAt && expiresAt <= DateTime.UtcNow)
{
await _cache.DeleteOneAsync(cacheFilter, cancellationToken).ConfigureAwait(false);
return null;
}
var manifestId = VexExportManifestRecord.CreateId(signature, format);
var manifestFilter = Builders<VexExportManifestRecord>.Filter.Eq(x => x.Id, manifestId);
var manifest = await _exports.Find(manifestFilter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
if (manifest is null)
{
await _cache.DeleteOneAsync(cacheFilter, cancellationToken).ConfigureAwait(false);
return null;
}
if (!string.IsNullOrWhiteSpace(cacheRecord.ManifestId) &&
!string.Equals(cacheRecord.ManifestId, manifest.Id, StringComparison.Ordinal))
{
await _cache.DeleteOneAsync(cacheFilter, cancellationToken).ConfigureAwait(false);
return null;
}
return manifest.ToDomain();
}
public async ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(manifest);
var entity = VexExportManifestRecord.FromDomain(manifest);
var filter = Builders<VexExportManifestRecord>.Filter.Eq(x => x.Id, entity.Id);
await _collection.ReplaceOneAsync(filter, entity, new ReplaceOptions { IsUpsert = true }, cancellationToken)
.ConfigureAwait(false);
using var session = await _client.StartSessionAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
var supportsTransactions = session.Client.Cluster.Description.Type != ClusterType.Standalone;
var startedTransaction = false;
if (supportsTransactions)
{
try
{
session.StartTransaction();
startedTransaction = true;
}
catch (NotSupportedException)
{
supportsTransactions = false;
}
}
try
{
var manifestRecord = VexExportManifestRecord.FromDomain(manifest);
var manifestFilter = Builders<VexExportManifestRecord>.Filter.Eq(x => x.Id, manifestRecord.Id);
await _exports
.ReplaceOneAsync(
session,
manifestFilter,
manifestRecord,
new ReplaceOptions { IsUpsert = true },
cancellationToken)
.ConfigureAwait(false);
var cacheEntry = CreateCacheEntry(manifest);
var cacheRecord = VexCacheEntryRecord.FromDomain(cacheEntry);
var cacheFilter = Builders<VexCacheEntryRecord>.Filter.Eq(x => x.Id, cacheRecord.Id);
await _cache
.ReplaceOneAsync(
session,
cacheFilter,
cacheRecord,
new ReplaceOptions { IsUpsert = true },
cancellationToken)
.ConfigureAwait(false);
if (startedTransaction)
{
await session.CommitTransactionAsync(cancellationToken).ConfigureAwait(false);
}
}
catch
{
if (startedTransaction && session.IsInTransaction)
{
await session.AbortTransactionAsync(cancellationToken).ConfigureAwait(false);
}
throw;
}
}
private static void EnsureIndexes(IMongoCollection<VexExportManifestRecord> collection)
private VexCacheEntry CreateCacheEntry(VexExportManifest manifest)
{
var keys = Builders<VexExportManifestRecord>.IndexKeys
.Ascending(x => x.QuerySignature)
.Ascending(x => x.Format);
var model = new CreateIndexModel<VexExportManifestRecord>(keys);
_ = collection.Indexes.CreateOne(model);
var expiresAt = manifest.CreatedAt + _options.ExportCacheTtl;
return new VexCacheEntry(
manifest.QuerySignature,
manifest.Format,
manifest.Artifact,
manifest.CreatedAt,
manifest.SizeBytes,
manifestId: manifest.ExportId,
gridFsObjectId: null,
expiresAt: expiresAt);
}
}

View File

@@ -0,0 +1,45 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Mongo-backed implementation of <see cref="IVexProviderStore"/>.
/// </summary>
public sealed class MongoVexProviderStore : IVexProviderStore
{
    private readonly IMongoCollection<VexProviderRecord> _providers;

    public MongoVexProviderStore(IMongoDatabase database)
    {
        ArgumentNullException.ThrowIfNull(database);
        VexMongoMappingRegistry.Register();
        _providers = database.GetCollection<VexProviderRecord>(VexMongoCollectionNames.Providers);
    }

    /// <summary>Finds a provider by its (trimmed) id; null when absent.</summary>
    public async ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(id);

        var record = await _providers
            .Find(Builders<VexProviderRecord>.Filter.Eq(x => x.Id, id.Trim()))
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return record is null ? null : record.ToDomain();
    }

    /// <summary>Lists all providers, ordered ascending by id.</summary>
    public async ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken)
    {
        var records = await _providers.Find(FilterDefinition<VexProviderRecord>.Empty)
            .Sort(Builders<VexProviderRecord>.Sort.Ascending(x => x.Id))
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        var results = new List<VexProvider>(records.Count);
        foreach (var record in records)
        {
            results.Add(record.ToDomain());
        }

        return results;
    }

    /// <summary>Upserts the provider record keyed by its id.</summary>
    public async ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(provider);

        var record = VexProviderRecord.FromDomain(provider);
        await _providers
            .ReplaceOneAsync(
                Builders<VexProviderRecord>.Filter.Eq(x => x.Id, record.Id),
                record,
                new ReplaceOptions { IsUpsert = true },
                cancellationToken)
            .ConfigureAwait(false);
    }
}

View File

@@ -1,33 +1,130 @@
using System;
using System.ComponentModel.DataAnnotations;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.Core.Clusters;
using MongoDB.Driver.GridFS;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
public sealed class MongoVexRawStore : IVexRawStore
{
private readonly IMongoClient _client;
private readonly IMongoCollection<VexRawDocumentRecord> _collection;
private readonly GridFSBucket _bucket;
private readonly VexMongoStorageOptions _options;
public MongoVexRawStore(IMongoDatabase database)
public MongoVexRawStore(
IMongoClient client,
IMongoDatabase database,
IOptions<VexMongoStorageOptions> options)
{
if (database is null)
{
throw new ArgumentNullException(nameof(database));
}
_client = client ?? throw new ArgumentNullException(nameof(client));
ArgumentNullException.ThrowIfNull(database);
ArgumentNullException.ThrowIfNull(options);
_options = options.Value;
Validator.ValidateObject(_options, new ValidationContext(_options), validateAllProperties: true);
VexMongoMappingRegistry.Register();
_collection = database.GetCollection<VexRawDocumentRecord>(VexMongoCollectionNames.Raw);
EnsureIndexes(_collection);
_bucket = new GridFSBucket(database, new GridFSBucketOptions
{
BucketName = _options.RawBucketName,
ReadConcern = database.Settings.ReadConcern,
ReadPreference = database.Settings.ReadPreference,
WriteConcern = database.Settings.WriteConcern,
});
}
public async ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(document);
var record = VexRawDocumentRecord.FromDomain(document);
var filter = Builders<VexRawDocumentRecord>.Filter.Eq(x => x.Id, record.Id);
await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken)
.ConfigureAwait(false);
var threshold = _options.GridFsInlineThresholdBytes;
var useInline = threshold == 0 || document.Content.Length <= threshold;
string? newGridId = null;
string? oldGridIdToDelete = null;
if (!useInline)
{
newGridId = await UploadToGridFsAsync(document, cancellationToken).ConfigureAwait(false);
}
using var session = await _client.StartSessionAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
var supportsTransactions = session.Client.Cluster.Description.Type != ClusterType.Standalone;
var startedTransaction = false;
if (supportsTransactions)
{
try
{
session.StartTransaction();
startedTransaction = true;
}
catch (NotSupportedException)
{
supportsTransactions = false;
}
}
try
{
var filter = Builders<VexRawDocumentRecord>.Filter.Eq(x => x.Id, document.Digest);
var existing = await _collection
.Find(session, filter)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
var record = VexRawDocumentRecord.FromDomain(document, includeContent: useInline);
record.GridFsObjectId = useInline ? null : newGridId;
await _collection
.ReplaceOneAsync(
session,
filter,
record,
new ReplaceOptions { IsUpsert = true },
cancellationToken)
.ConfigureAwait(false);
if (existing?.GridFsObjectId is string oldGridId && !string.IsNullOrWhiteSpace(oldGridId))
{
if (useInline || !string.Equals(newGridId, oldGridId, StringComparison.Ordinal))
{
oldGridIdToDelete = oldGridId;
}
}
if (startedTransaction)
{
await session.CommitTransactionAsync(cancellationToken).ConfigureAwait(false);
}
}
catch
{
if (startedTransaction && session.IsInTransaction)
{
await session.AbortTransactionAsync(cancellationToken).ConfigureAwait(false);
}
if (!useInline && !string.IsNullOrWhiteSpace(newGridId))
{
await DeleteFromGridFsAsync(newGridId, cancellationToken).ConfigureAwait(false);
}
throw;
}
if (!string.IsNullOrWhiteSpace(oldGridIdToDelete))
{
await DeleteFromGridFsAsync(oldGridIdToDelete!, cancellationToken).ConfigureAwait(false);
}
}
public async ValueTask<VexRawDocument?> FindByDigestAsync(string digest, CancellationToken cancellationToken)
@@ -37,17 +134,66 @@ public sealed class MongoVexRawStore : IVexRawStore
throw new ArgumentException("Digest must be provided.", nameof(digest));
}
var filter = Builders<VexRawDocumentRecord>.Filter.Eq(x => x.Id, digest.Trim());
var trimmed = digest.Trim();
var filter = Builders<VexRawDocumentRecord>.Filter.Eq(x => x.Id, trimmed);
var record = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return record?.ToDomain();
if (record is null)
{
return null;
}
if (!string.IsNullOrWhiteSpace(record.GridFsObjectId))
{
var bytes = await DownloadFromGridFsAsync(record.GridFsObjectId, cancellationToken).ConfigureAwait(false);
return record.ToDomain(new ReadOnlyMemory<byte>(bytes));
}
return record.ToDomain();
}
private static void EnsureIndexes(IMongoCollection<VexRawDocumentRecord> collection)
private async Task<string?> UploadToGridFsAsync(VexRawDocument document, CancellationToken cancellationToken)
{
var keys = Builders<VexRawDocumentRecord>.IndexKeys
.Ascending(x => x.ProviderId)
.Ascending(x => x.SourceUri);
var model = new CreateIndexModel<VexRawDocumentRecord>(keys);
_ = collection.Indexes.CreateOne(model);
using var stream = new MemoryStream(document.Content.ToArray(), writable: false);
var metadata = new BsonDocument
{
{ "providerId", document.ProviderId },
{ "format", document.Format.ToString().ToLowerInvariant() },
{ "sourceUri", document.SourceUri.ToString() },
{ "retrievedAt", document.RetrievedAt.UtcDateTime },
};
var options = new GridFSUploadOptions { Metadata = metadata };
var objectId = await _bucket
.UploadFromStreamAsync(document.Digest, stream, options, cancellationToken)
.ConfigureAwait(false);
return objectId.ToString();
}
// Best-effort removal of a GridFS payload. An unparseable object id is ignored,
// and a missing file (e.g. already purged) is treated as success.
private async Task DeleteFromGridFsAsync(string gridFsObjectId, CancellationToken cancellationToken)
{
    if (!ObjectId.TryParse(gridFsObjectId, out var objectId))
    {
        return;
    }
    try
    {
        await _bucket.DeleteAsync(objectId, cancellationToken).ConfigureAwait(false);
    }
    catch (GridFSFileNotFoundException)
    {
        // file already removed by TTL or manual cleanup
    }
}
// Fetches a raw payload from GridFS by its stored object id.
// NOTE(review): an unparseable id yields an empty payload instead of throwing,
// which silently masks a corrupt GridFsObjectId reference — confirm callers can
// distinguish "empty content" from "missing content".
private async Task<byte[]> DownloadFromGridFsAsync(string gridFsObjectId, CancellationToken cancellationToken)
{
    if (!ObjectId.TryParse(gridFsObjectId, out var objectId))
    {
        return Array.Empty<byte>();
    }
    return await _bucket.DownloadAsBytesAsync(objectId, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Vexer.Storage.Mongo.Tests")]

View File

@@ -1,4 +1,6 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Storage.Mongo.Migrations;
namespace StellaOps.Vexer.Storage.Mongo;
@@ -6,8 +8,17 @@ public static class VexMongoServiceCollectionExtensions
{
/// <summary>
/// Registers the Mongo-backed Vexer storage services: raw/export/provider/consensus
/// stores, the export-cache index and its maintenance service, and the index-migration
/// runner executed via a hosted service at startup.
/// </summary>
public static IServiceCollection AddVexerMongoStorage(this IServiceCollection services)
{
    // Options are registered unbound here; the host binds configuration separately.
    services.AddOptions<VexMongoStorageOptions>();
    services.AddSingleton<IVexRawStore, MongoVexRawStore>();
    services.AddSingleton<IVexExportStore, MongoVexExportStore>();
    services.AddSingleton<IVexProviderStore, MongoVexProviderStore>();
    services.AddSingleton<IVexConsensusStore, MongoVexConsensusStore>();
    services.AddSingleton<IVexCacheIndex, MongoVexCacheIndex>();
    services.AddSingleton<IVexCacheMaintenance, MongoVexCacheMaintenance>();
    // The runner is also resolvable directly so tests can execute migrations eagerly.
    services.AddSingleton<IVexMongoMigration, VexInitialIndexMigration>();
    services.AddSingleton<VexMongoMigrationRunner>();
    services.AddHostedService<VexMongoMigrationHostedService>();
    return services;
}
}

View File

@@ -9,6 +9,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="8.0.0" />
<PackageReference Include="MongoDB.Driver" Version="2.22.0" />
<PackageReference Include="MongoDB.Driver.GridFS" Version="2.22.0" />
</ItemGroup>

View File

@@ -3,6 +3,8 @@ If you are working on this file you need to read docs/ARCHITECTURE_VEXER.md and
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|VEXER-STORAGE-01-001 Collection schemas & class maps|Team Vexer Storage|VEXER-CORE-01-001|DONE (2025-10-15) Added Mongo mapping registry with raw/export entities and service registration groundwork.|
|VEXER-STORAGE-01-002 Migrations & indices bootstrap|Team Vexer Storage|VEXER-STORAGE-01-001|TODO Add bootstrapper creating indices (claims by vulnId/product, exports by querySignature, etc.) and migrations for existing deployments.|
|VEXER-STORAGE-01-003 Repository layer & transactional flows|Team Vexer Storage|VEXER-STORAGE-01-001|TODO Provide repository APIs for ingest upserts, export registration, cache lookups, and GridFS raw storage with deterministic transactions.|
|VEXER-STORAGE-01-004 Provider/consensus/cache mappings|Team Vexer Storage|VEXER-STORAGE-01-001|TODO Implement Bson class maps and collections for providers, consensus snapshots, and cache index (including GridFS linkage).|
|VEXER-STORAGE-01-002 Migrations & indices bootstrap|Team Vexer Storage|VEXER-STORAGE-01-001|**DONE (2025-10-16)** Add bootstrapper creating indices (claims by vulnId/product, exports by querySignature, etc.) and migrations for existing deployments.<br>2025-10-16: Introduced migration runner + hosted service, initial index migration covers raw/providers/consensus/exports/cache, and tests use Mongo2Go to verify execution.|
|VEXER-STORAGE-01-003 Repository layer & transactional flows|Team Vexer Storage|VEXER-STORAGE-01-001|**DONE (2025-10-16)** Added GridFS-backed raw store with transactional upserts (including fallback for non-replicaset Mongo), export/cache repository coordination, and coverage verifying cache TTL + GridFS round-trips.|
|VEXER-STORAGE-01-004 Provider/consensus/cache mappings|Team Vexer Storage|VEXER-STORAGE-01-001|**DONE (2025-10-16)** Registered MongoDB class maps for provider/consensus/cache records with forward-compatible field handling and added coverage ensuring GridFS-linked cache entries round-trip cleanly.|
|VEXER-STORAGE-02-001 Statement events & scoring signals|Team Vexer Storage|VEXER-CORE-02-001|TODO Add immutable `vex.statements` collection, extend consensus documents with severity/KEV/EPSS fields, build indices for `policyRevisionId`/`generatedAt`, and script migrations/backfill guidance for Phase 1 rollout.|
|VEXER-STORAGE-MONGO-08-001 Session + causal consistency hardening|Team Vexer Storage|VEXER-STORAGE-01-003|TODO Register Mongo client/database with majority read/write concerns, expose scoped session helper enabling causal consistency, thread session handles through raw/export/consensus/cache stores (including GridFS reads), and extend integration tests to verify read-your-write semantics during replica-set failover.|

View File

@@ -15,15 +15,51 @@ public static class VexMongoMappingRegistry
return;
}
if (!BsonSerializer.IsSerializerRegistered(typeof(byte[])))
try
{
BsonSerializer.RegisterSerializer(new ByteArraySerializer());
BsonSerializer.RegisterSerializer(typeof(byte[]), new ByteArraySerializer());
}
catch
{
// serializer already registered — safe to ignore
}
RegisterClassMaps();
}
private static void RegisterClassMaps()
{
RegisterClassMap<VexProviderRecord>();
RegisterClassMap<VexProviderDiscoveryDocument>();
RegisterClassMap<VexProviderTrustDocument>();
RegisterClassMap<VexCosignTrustDocument>();
RegisterClassMap<VexConsensusRecord>();
RegisterClassMap<VexProductDocument>();
RegisterClassMap<VexConsensusSourceDocument>();
RegisterClassMap<VexConsensusConflictDocument>();
RegisterClassMap<VexConfidenceDocument>();
RegisterClassMap<VexCacheEntryRecord>();
}
private static void RegisterClassMap<TDocument>()
where TDocument : class
{
if (BsonClassMap.IsClassMapRegistered(typeof(TDocument)))
{
return;
}
BsonClassMap.RegisterClassMap<TDocument>(classMap =>
{
classMap.AutoMap();
classMap.SetIgnoreExtraElements(true);
});
}
}
public static class VexMongoCollectionNames
{
public const string Migrations = "vex.migrations";
public const string Providers = "vex.providers";
public const string Raw = "vex.raw";
public const string Claims = "vex.claims";

View File

@@ -3,11 +3,13 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using StellaOps.Vexer.Core;
namespace StellaOps.Vexer.Storage.Mongo;
[BsonIgnoreExtraElements]
internal sealed class VexRawDocumentRecord
{
[BsonId]
@@ -26,9 +28,13 @@ internal sealed class VexRawDocumentRecord
public byte[] Content { get; set; } = Array.Empty<byte>();
[BsonRepresentation(BsonType.ObjectId)]
public string? GridFsObjectId { get; set; }
= null;
public Dictionary<string, string> Metadata { get; set; } = new(StringComparer.Ordinal);
public static VexRawDocumentRecord FromDomain(VexRawDocument document)
public static VexRawDocumentRecord FromDomain(VexRawDocument document, bool includeContent = true)
=> new()
{
Id = document.Digest,
@@ -37,22 +43,26 @@ internal sealed class VexRawDocumentRecord
SourceUri = document.SourceUri.ToString(),
RetrievedAt = document.RetrievedAt.UtcDateTime,
Digest = document.Digest,
Content = document.Content.ToArray(),
Content = includeContent ? document.Content.ToArray() : Array.Empty<byte>(),
Metadata = document.Metadata.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal),
};
public VexRawDocument ToDomain()
=> ToDomain(new ReadOnlyMemory<byte>(Content ?? Array.Empty<byte>()));
public VexRawDocument ToDomain(ReadOnlyMemory<byte> content)
=> new(
ProviderId,
Enum.Parse<VexDocumentFormat>(Format, ignoreCase: true),
new Uri(SourceUri),
RetrievedAt,
Digest,
new ReadOnlyMemory<byte>(Content ?? Array.Empty<byte>()),
content,
(Metadata ?? new Dictionary<string, string>(StringComparer.Ordinal))
.ToImmutableDictionary(StringComparer.Ordinal));
}
[BsonIgnoreExtraElements]
internal sealed class VexExportManifestRecord
{
[BsonId]
@@ -162,5 +172,401 @@ internal sealed class VexExportManifestRecord
}
public static string CreateId(VexQuerySignature signature, VexExportFormat format)
=> string.Create(CultureInfo.InvariantCulture, $"{signature.Value}|{format.ToString().ToLowerInvariant()}");
=> string.Format(CultureInfo.InvariantCulture, "{0}|{1}", signature.Value, format.ToString().ToLowerInvariant());
}
/// <summary>
/// Mongo persistence shape for <see cref="VexProvider"/>; unknown fields are ignored so
/// newer documents remain readable by older binaries.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexProviderRecord
{
    // Provider identifier doubles as the Mongo document key.
    [BsonId]
    public string Id { get; set; } = default!;
    public string DisplayName { get; set; } = default!;
    // Persisted lower-cased; parsed back case-insensitively in ToDomain.
    public string Kind { get; set; } = default!;
    public List<string> BaseUris { get; set; } = new();
    public VexProviderDiscoveryDocument? Discovery { get; set; }
        = null;
    public VexProviderTrustDocument? Trust { get; set; }
        = null;
    public bool Enabled { get; set; }
        = true;
    /// <summary>Projects the domain provider onto its persistence shape.</summary>
    public static VexProviderRecord FromDomain(VexProvider provider)
        => new()
        {
            Id = provider.Id,
            DisplayName = provider.DisplayName,
            Kind = provider.Kind.ToString().ToLowerInvariant(),
            BaseUris = provider.BaseUris.Select(uri => uri.ToString()).ToList(),
            Discovery = VexProviderDiscoveryDocument.FromDomain(provider.Discovery),
            Trust = VexProviderTrustDocument.FromDomain(provider.Trust),
            Enabled = provider.Enabled,
        };
    /// <summary>Rehydrates the domain provider; a missing BaseUris list maps to no URIs.</summary>
    public VexProvider ToDomain()
    {
        var uris = BaseUris?.Select(uri => new Uri(uri)) ?? Enumerable.Empty<Uri>();
        return new VexProvider(
            Id,
            DisplayName,
            Enum.Parse<VexProviderKind>(Kind, ignoreCase: true),
            uris,
            Discovery?.ToDomain(),
            Trust?.ToDomain(),
            Enabled);
    }
}
/// <summary>
/// Mongo shape for provider discovery endpoints; URIs are persisted as strings.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexProviderDiscoveryDocument
{
    public string? WellKnownMetadata { get; set; }
        = null;
    // NOTE(review): "RolIe" casing mirrors the domain property (presumably ROLIE,
    // RFC 8322 Resource-Oriented Lightweight Information Exchange) — confirm.
    public string? RolIeService { get; set; }
        = null;
    /// <summary>Maps the domain discovery settings (null-propagating) onto the document.</summary>
    public static VexProviderDiscoveryDocument? FromDomain(VexProviderDiscovery? discovery)
        => discovery is null
            ? null
            : new VexProviderDiscoveryDocument
            {
                WellKnownMetadata = discovery.WellKnownMetadata?.ToString(),
                RolIeService = discovery.RolIeService?.ToString(),
            };
    /// <summary>Rehydrates discovery settings, converting stored strings back into URIs.</summary>
    public VexProviderDiscovery ToDomain()
        => new(
            WellKnownMetadata is null ? null : new Uri(WellKnownMetadata),
            RolIeService is null ? null : new Uri(RolIeService));
}
/// <summary>
/// Mongo shape for provider trust metadata (weight, cosign identity, PGP fingerprints).
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexProviderTrustDocument
{
    // Default weight of 1.0 applies when the field is absent from an older document.
    public double Weight { get; set; }
        = 1.0;
    public VexCosignTrustDocument? Cosign { get; set; }
        = null;
    public List<string> PgpFingerprints { get; set; } = new();
    /// <summary>Maps domain trust settings (null-propagating) onto the document.</summary>
    public static VexProviderTrustDocument? FromDomain(VexProviderTrust? trust)
        => trust is null
            ? null
            : new VexProviderTrustDocument
            {
                Weight = trust.Weight,
                Cosign = trust.Cosign is null ? null : VexCosignTrustDocument.FromDomain(trust.Cosign),
                PgpFingerprints = trust.PgpFingerprints.ToList(),
            };
    /// <summary>Rehydrates the domain trust settings.</summary>
    public VexProviderTrust ToDomain()
        => new(
            Weight,
            Cosign?.ToDomain(),
            PgpFingerprints);
}
/// <summary>
/// Mongo shape for cosign trust anchors (issuer + identity pattern).
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexCosignTrustDocument
{
    public string Issuer { get; set; } = default!;

    public string IdentityPattern { get; set; } = default!;

    /// <summary>Maps the domain cosign trust anchor onto its persistence shape.</summary>
    public static VexCosignTrustDocument FromDomain(VexCosignTrust trust)
    {
        var document = new VexCosignTrustDocument();
        document.Issuer = trust.Issuer;
        document.IdentityPattern = trust.IdentityPattern;
        return document;
    }

    /// <summary>Rehydrates the domain cosign trust anchor.</summary>
    public VexCosignTrust ToDomain() => new VexCosignTrust(Issuer, IdentityPattern);
}
/// <summary>
/// Mongo persistence shape for <see cref="VexConsensus"/>, keyed by "vulnerabilityId|productKey".
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexConsensusRecord
{
    [BsonId]
    public string Id { get; set; } = default!;
    public string VulnerabilityId { get; set; } = default!;
    public VexProductDocument Product { get; set; } = default!;
    // Persisted lower-cased; parsed back case-insensitively in ToDomain.
    public string Status { get; set; } = default!;
    // Stored as UTC DateTime; rehydrated with an explicit zero offset in ToDomain.
    public DateTime CalculatedAt { get; set; }
        = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
    public List<VexConsensusSourceDocument> Sources { get; set; } = new();
    public List<VexConsensusConflictDocument> Conflicts { get; set; } = new();
    public string? PolicyVersion { get; set; }
        = null;
    public string? PolicyRevisionId { get; set; }
        = null;
    public string? PolicyDigest { get; set; }
        = null;
    public string? Summary { get; set; }
        = null;
    /// <summary>Builds the composite document id from trimmed vulnerability id and product key.</summary>
    public static string CreateId(string vulnerabilityId, string productKey)
        => string.Format(CultureInfo.InvariantCulture, "{0}|{1}", vulnerabilityId.Trim(), productKey.Trim());
    /// <summary>Projects the domain consensus onto its persistence shape.</summary>
    public static VexConsensusRecord FromDomain(VexConsensus consensus)
        => new()
        {
            Id = CreateId(consensus.VulnerabilityId, consensus.Product.Key),
            VulnerabilityId = consensus.VulnerabilityId,
            Product = VexProductDocument.FromDomain(consensus.Product),
            Status = consensus.Status.ToString().ToLowerInvariant(),
            CalculatedAt = consensus.CalculatedAt.UtcDateTime,
            Sources = consensus.Sources.Select(VexConsensusSourceDocument.FromDomain).ToList(),
            Conflicts = consensus.Conflicts.Select(VexConsensusConflictDocument.FromDomain).ToList(),
            PolicyVersion = consensus.PolicyVersion,
            PolicyRevisionId = consensus.PolicyRevisionId,
            PolicyDigest = consensus.PolicyDigest,
            Summary = consensus.Summary,
        };
    /// <summary>Rehydrates the domain consensus from the stored document.</summary>
    public VexConsensus ToDomain()
        => new(
            VulnerabilityId,
            Product.ToDomain(),
            Enum.Parse<VexConsensusStatus>(Status, ignoreCase: true),
            new DateTimeOffset(CalculatedAt, TimeSpan.Zero),
            Sources.Select(static source => source.ToDomain()),
            Conflicts.Select(static conflict => conflict.ToDomain()),
            PolicyVersion,
            Summary,
            PolicyRevisionId,
            PolicyDigest);
}
/// <summary>
/// Mongo shape for <see cref="VexProduct"/>; all identity fields besides the key are optional.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexProductDocument
{
    public string Key { get; set; } = default!;
    public string? Name { get; set; }
        = null;
    public string? Version { get; set; }
        = null;
    public string? Purl { get; set; }
        = null;
    public string? Cpe { get; set; }
        = null;
    public List<string> ComponentIdentifiers { get; set; } = new();
    /// <summary>Projects the domain product onto its persistence shape.</summary>
    public static VexProductDocument FromDomain(VexProduct product)
        => new()
        {
            Key = product.Key,
            Name = product.Name,
            Version = product.Version,
            Purl = product.Purl,
            Cpe = product.Cpe,
            ComponentIdentifiers = product.ComponentIdentifiers.ToList(),
        };
    /// <summary>Rehydrates the domain product.</summary>
    public VexProduct ToDomain()
        => new(
            Key,
            Name,
            Version,
            Purl,
            Cpe,
            ComponentIdentifiers);
}
/// <summary>
/// Mongo shape for a per-provider source that contributed to a consensus entry.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexConsensusSourceDocument
{
    public string ProviderId { get; set; } = default!;
    // Persisted lower-cased; parsed back case-insensitively in ToDomain.
    public string Status { get; set; } = default!;
    public string DocumentDigest { get; set; } = default!;
    public double Weight { get; set; }
        = 0;
    // Enum persisted lower-cased, or null when the source carried no justification.
    public string? Justification { get; set; }
        = null;
    public string? Detail { get; set; }
        = null;
    public VexConfidenceDocument? Confidence { get; set; }
        = null;
    /// <summary>Projects the domain consensus source onto its persistence shape.</summary>
    public static VexConsensusSourceDocument FromDomain(VexConsensusSource source)
        => new()
        {
            ProviderId = source.ProviderId,
            Status = source.Status.ToString().ToLowerInvariant(),
            DocumentDigest = source.DocumentDigest,
            Weight = source.Weight,
            Justification = source.Justification?.ToString().ToLowerInvariant(),
            Detail = source.Detail,
            Confidence = source.Confidence is null ? null : VexConfidenceDocument.FromDomain(source.Confidence),
        };
    /// <summary>Rehydrates the domain consensus source; a blank justification maps to null.</summary>
    public VexConsensusSource ToDomain()
        => new(
            ProviderId,
            Enum.Parse<VexClaimStatus>(Status, ignoreCase: true),
            DocumentDigest,
            Weight,
            string.IsNullOrWhiteSpace(Justification) ? null : Enum.Parse<VexJustification>(Justification, ignoreCase: true),
            Detail,
            Confidence?.ToDomain());
}
/// <summary>
/// Mongo shape for a provider claim that conflicted with the computed consensus.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexConsensusConflictDocument
{
    public string ProviderId { get; set; } = default!;
    // Persisted lower-cased; parsed back case-insensitively in ToDomain.
    public string Status { get; set; } = default!;
    public string DocumentDigest { get; set; } = default!;
    public string? Justification { get; set; }
        = null;
    public string? Detail { get; set; }
        = null;
    public string? Reason { get; set; }
        = null;
    /// <summary>Projects the domain conflict onto its persistence shape.</summary>
    public static VexConsensusConflictDocument FromDomain(VexConsensusConflict conflict)
        => new()
        {
            ProviderId = conflict.ProviderId,
            Status = conflict.Status.ToString().ToLowerInvariant(),
            DocumentDigest = conflict.DocumentDigest,
            Justification = conflict.Justification?.ToString().ToLowerInvariant(),
            Detail = conflict.Detail,
            Reason = conflict.Reason,
        };
    /// <summary>Rehydrates the domain conflict; a blank justification maps to null.</summary>
    public VexConsensusConflict ToDomain()
        => new(
            ProviderId,
            Enum.Parse<VexClaimStatus>(Status, ignoreCase: true),
            DocumentDigest,
            string.IsNullOrWhiteSpace(Justification) ? null : Enum.Parse<VexJustification>(Justification, ignoreCase: true),
            Detail,
            Reason);
}
/// <summary>
/// Mongo shape for <see cref="VexConfidence"/>; ignores unknown fields for forward compatibility.
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexConfidenceDocument
{
    public string Level { get; set; } = default!;

    public double? Score { get; set; }

    public string? Method { get; set; }

    /// <summary>Maps the domain confidence value onto its persistence shape.</summary>
    public static VexConfidenceDocument FromDomain(VexConfidence confidence)
    {
        var document = new VexConfidenceDocument();
        document.Level = confidence.Level;
        document.Score = confidence.Score;
        document.Method = confidence.Method;
        return document;
    }

    /// <summary>Rehydrates the domain confidence value.</summary>
    public VexConfidence ToDomain() => new VexConfidence(Level, Score, Method);
}
/// <summary>
/// Mongo shape for an export-cache entry, keyed by "querySignature|format".
/// </summary>
[BsonIgnoreExtraElements]
internal sealed class VexCacheEntryRecord
{
    [BsonId]
    public string Id { get; set; } = default!;
    public string QuerySignature { get; set; } = default!;
    // Persisted lower-cased; parsed back case-insensitively in ToDomain.
    public string Format { get; set; } = default!;
    public string ArtifactAlgorithm { get; set; } = default!;
    public string ArtifactDigest { get; set; } = default!;
    public DateTime CreatedAt { get; set; }
        = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc);
    public long SizeBytes { get; set; }
        = 0;
    // Identifier of the export manifest this entry was produced from, when known.
    public string? ManifestId { get; set; }
        = null;
    // Optional link to a GridFS object backing the cached artifact.
    [BsonRepresentation(BsonType.ObjectId)]
    public string? GridFsObjectId { get; set; }
        = null;
    // Absolute expiration timestamp; presumably consumed by cache maintenance/TTL
    // indexing — confirm against the index migrations.
    public DateTime? ExpiresAt { get; set; }
        = null;
    /// <summary>Builds the composite cache key from query signature and export format.</summary>
    public static string CreateId(VexQuerySignature signature, VexExportFormat format)
        => string.Format(CultureInfo.InvariantCulture, "{0}|{1}", signature.Value, format.ToString().ToLowerInvariant());
    /// <summary>Projects the domain cache entry onto its persistence shape.</summary>
    public static VexCacheEntryRecord FromDomain(VexCacheEntry entry)
        => new()
        {
            Id = CreateId(entry.QuerySignature, entry.Format),
            QuerySignature = entry.QuerySignature.Value,
            Format = entry.Format.ToString().ToLowerInvariant(),
            ArtifactAlgorithm = entry.Artifact.Algorithm,
            ArtifactDigest = entry.Artifact.Digest,
            CreatedAt = entry.CreatedAt.UtcDateTime,
            SizeBytes = entry.SizeBytes,
            ManifestId = entry.ManifestId,
            GridFsObjectId = entry.GridFsObjectId,
            ExpiresAt = entry.ExpiresAt?.UtcDateTime,
        };
    /// <summary>Rehydrates the domain cache entry, restoring UTC kinds on stored timestamps.</summary>
    public VexCacheEntry ToDomain()
    {
        var signature = new VexQuerySignature(QuerySignature);
        var artifact = new VexContentAddress(ArtifactAlgorithm, ArtifactDigest);
        var createdAt = new DateTimeOffset(DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc));
        var expires = ExpiresAt.HasValue
            ? new DateTimeOffset(DateTime.SpecifyKind(ExpiresAt.Value, DateTimeKind.Utc))
            : (DateTimeOffset?)null;
        return new VexCacheEntry(
            signature,
            Enum.Parse<VexExportFormat>(Format, ignoreCase: true),
            artifact,
            createdAt,
            SizeBytes,
            ManifestId,
            GridFsObjectId,
            expires);
    }
}

View File

@@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
namespace StellaOps.Vexer.Storage.Mongo;
/// <summary>
/// Configuration controlling Mongo-backed storage for Vexer repositories.
/// </summary>
public sealed class VexMongoStorageOptions : IValidatableObject
{
    /// <summary>
    /// Name of the GridFS bucket used for raw VEX payloads that exceed <see cref="GridFsInlineThresholdBytes"/>.
    /// </summary>
    public string RawBucketName { get; set; } = "vex.raw";

    /// <summary>
    /// Inline raw document payloads smaller than this threshold; larger payloads are stored in GridFS.
    /// Defaults to 256 KiB.
    /// </summary>
    public int GridFsInlineThresholdBytes { get; set; } = 256 * 1024;

    /// <summary>
    /// Default TTL applied to export cache entries (absolute expiration). Defaults to 12 hours.
    /// </summary>
    public TimeSpan ExportCacheTtl { get; set; } = TimeSpan.FromHours(12);

    /// <summary>
    /// Checks the configured values and reports every violation (bucket name present,
    /// non-negative inline threshold, positive cache TTL).
    /// </summary>
    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        var failures = new List<ValidationResult>();

        if (string.IsNullOrWhiteSpace(RawBucketName))
        {
            failures.Add(new ValidationResult("Raw bucket name must be provided.", new[] { nameof(RawBucketName) }));
        }

        if (GridFsInlineThresholdBytes < 0)
        {
            failures.Add(new ValidationResult("GridFS inline threshold must be non-negative.", new[] { nameof(GridFsInlineThresholdBytes) }));
        }

        if (ExportCacheTtl <= TimeSpan.Zero)
        {
            failures.Add(new ValidationResult("Export cache TTL must be greater than zero.", new[] { nameof(ExportCacheTtl) }));
        }

        return failures;
    }
}

View File

@@ -0,0 +1,105 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Net.Http.Json;
using System.IO;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Mongo2Go;
using MongoDB.Driver;
using StellaOps.Vexer.Attestation.Signing;
using StellaOps.Vexer.Policy;
using StellaOps.Vexer.Core;
using StellaOps.Vexer.Export;
using StellaOps.Vexer.WebService;
namespace StellaOps.Vexer.WebService.Tests;
/// <summary>
/// Integration coverage for the /vexer/status endpoint using an in-process web host
/// backed by an ephemeral Mongo2Go instance and fake signing/policy/export services.
/// </summary>
public sealed class StatusEndpointTests : IClassFixture<WebApplicationFactory<Program>>, IDisposable
{
    private readonly WebApplicationFactory<Program> _factory;
    // Ephemeral MongoDB spawned per test-class instance; torn down in Dispose.
    private readonly MongoDbRunner _runner;
    public StatusEndpointTests(WebApplicationFactory<Program> factory)
    {
        _runner = MongoDbRunner.Start();
        _factory = factory.WithWebHostBuilder(builder =>
        {
            builder.ConfigureAppConfiguration((_, config) =>
            {
                // Point the file-system artifact store at a temp directory so the host starts offline.
                var rootPath = Path.Combine(Path.GetTempPath(), "vexer-offline-tests");
                Directory.CreateDirectory(rootPath);
                var settings = new Dictionary<string, string?>
                {
                    ["Vexer:Storage:Mongo:RawBucketName"] = "vex.raw",
                    ["Vexer:Storage:Mongo:GridFsInlineThresholdBytes"] = "256",
                    ["Vexer:Artifacts:FileSystem:RootPath"] = rootPath,
                };
                config.AddInMemoryCollection(settings!);
            });
            builder.ConfigureServices(services =>
            {
                // Swap in the ephemeral Mongo and stub out external dependencies.
                services.AddSingleton<IMongoClient>(_ => new MongoClient(_runner.ConnectionString));
                services.AddSingleton(provider => provider.GetRequiredService<IMongoClient>().GetDatabase("vexer-web-tests"));
                services.AddSingleton<IVexSigner, FakeSigner>();
                services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>();
                services.AddSingleton<IVexExportDataSource, FakeExportDataSource>();
            });
        });
    }
    [Fact]
    public async Task StatusEndpoint_ReturnsArtifactStores()
    {
        var client = _factory.CreateClient();
        var response = await client.GetAsync("/vexer/status");
        // Include the raw body in the assertion message so startup failures are diagnosable.
        var raw = await response.Content.ReadAsStringAsync();
        Assert.True(response.IsSuccessStatusCode, raw);
        var payload = System.Text.Json.JsonSerializer.Deserialize<StatusResponse>(raw);
        Assert.NotNull(payload);
        Assert.NotEmpty(payload!.ArtifactStores);
    }
    // NOTE(review): only the Mongo runner is disposed here; the derived _factory is left
    // for the shared fixture to tear down — confirm the derived host is released with it.
    public void Dispose()
    {
        _runner.Dispose();
    }
    // Deserialization target for the status payload; only the field under test is modeled.
    private sealed class StatusResponse
    {
        public string[] ArtifactStores { get; set; } = Array.Empty<string>();
    }
    // Stub signer returning a fixed signature/key pair.
    private sealed class FakeSigner : IVexSigner
    {
        public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexSignedPayload("signature", "key"));
    }
    // Stub policy evaluator that accepts every claim at full weight.
    private sealed class FakePolicyEvaluator : IVexPolicyEvaluator
    {
        public string Version => "test";
        public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default;
        public double GetProviderWeight(VexProvider provider) => 1.0;
        public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason)
        {
            rejectionReason = null;
            return true;
        }
    }
    // Stub export data source yielding an empty dataset.
    private sealed class FakeExportDataSource : IVexExportDataSource
    {
        public ValueTask<VexExportDataSet> FetchAsync(VexQuery query, CancellationToken cancellationToken)
        {
            var dataset = new VexExportDataSet(ImmutableArray<VexConsensus>.Empty, ImmutableArray<VexClaim>.Empty, ImmutableArray<string>.Empty);
            return ValueTask.FromResult(dataset);
        }
    }
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Update="Microsoft.AspNetCore.Mvc.Testing" Version="8.0.8" />
<PackageReference Update="Mongo2Go" Version="3.1.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Vexer.WebService\StellaOps.Vexer.WebService.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,89 @@
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Options;
using StellaOps.Vexer.Attestation.Extensions;
using StellaOps.Vexer.Attestation;
using StellaOps.Vexer.Attestation.Transparency;
using StellaOps.Vexer.ArtifactStores.S3.Extensions;
using StellaOps.Vexer.Export;
using StellaOps.Vexer.Storage.Mongo;
var builder = WebApplication.CreateBuilder(args);
var configuration = builder.Configuration;
var services = builder.Services;

// Bind Mongo storage options and fail fast at startup on invalid configuration.
// FIX: ValidateDataAnnotations() is required for ValidateOnStart() to do anything —
// ValidateOnStart only runs registered IValidateOptions validators, and without one
// the chain was a no-op. The data-annotations validator also executes
// VexMongoStorageOptions' IValidatableObject.Validate implementation.
services.AddOptions<VexMongoStorageOptions>()
    .Bind(configuration.GetSection("Vexer:Storage:Mongo"))
    .ValidateDataAnnotations()
    .ValidateOnStart();
services.AddVexerMongoStorage();
services.AddVexExportEngine();
services.AddVexExportCacheServices();
services.AddVexAttestation();
services.Configure<VexAttestationClientOptions>(configuration.GetSection("Vexer:Attestation:Client"));

// Optional Rekor transparency-log client, enabled only when configured.
var rekorSection = configuration.GetSection("Vexer:Attestation:Rekor");
if (rekorSection.Exists())
{
    services.AddVexRekorClient(opts => rekorSection.Bind(opts));
}

// File-system artifact store is always registered; defaults apply when unconfigured.
var fileSystemSection = configuration.GetSection("Vexer:Artifacts:FileSystem");
if (fileSystemSection.Exists())
{
    services.AddVexFileSystemArtifactStore(opts => fileSystemSection.Bind(opts));
}
else
{
    services.AddVexFileSystemArtifactStore(_ => { });
}

// Optional S3-backed artifact store; "Client" and "Store" subsections are bound separately.
var s3Section = configuration.GetSection("Vexer:Artifacts:S3");
if (s3Section.Exists())
{
    services.AddVexS3ArtifactClient(opts => s3Section.GetSection("Client").Bind(opts));
    services.AddSingleton<IVexArtifactStore, S3ArtifactStore>(provider =>
    {
        var options = new S3ArtifactStoreOptions();
        s3Section.GetSection("Store").Bind(options);
        return new S3ArtifactStore(
            provider.GetRequiredService<IS3ArtifactClient>(),
            Microsoft.Extensions.Options.Options.Create(options),
            provider.GetRequiredService<Microsoft.Extensions.Logging.ILogger<S3ArtifactStore>>());
    });
}

// Optional offline-bundle artifact store.
var offlineSection = configuration.GetSection("Vexer:Artifacts:OfflineBundle");
if (offlineSection.Exists())
{
    services.AddVexOfflineBundleArtifactStore(opts => offlineSection.Bind(opts));
}

services.AddEndpointsApiExplorer();
services.AddHealthChecks();
services.AddSingleton(TimeProvider.System);

var app = builder.Build();

// Status endpoint exposing current time, Mongo bucket settings, and registered artifact stores.
app.MapGet("/vexer/status", async (HttpContext context,
    IEnumerable<IVexArtifactStore> artifactStores,
    IOptions<VexMongoStorageOptions> mongoOptions,
    TimeProvider timeProvider) =>
{
    var payload = new StatusResponse(
        timeProvider.GetUtcNow(),
        mongoOptions.Value.RawBucketName,
        mongoOptions.Value.GridFsInlineThresholdBytes,
        artifactStores.Select(store => store.GetType().Name).ToArray());
    context.Response.ContentType = "application/json";
    await System.Text.Json.JsonSerializer.SerializeAsync(context.Response.Body, payload);
});

app.MapHealthChecks("/vexer/health");

app.Run();

// Exposed so WebApplicationFactory-based integration tests can reference the entry point.
public partial class Program;

internal sealed record StatusResponse(DateTimeOffset UtcNow, string MongoBucket, int InlineThreshold, string[] ArtifactStores);

Some files were not shown because too many files have changed in this diff Show More