up
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

This commit is contained in:
Vladimir Moushkov
2025-10-29 19:24:20 +02:00
parent 3154c67978
commit 55464f8498
41 changed files with 2134 additions and 168 deletions

View File

@@ -20,12 +20,14 @@
<package pattern="Microsoft.Extensions.DependencyInjection.Abstractions" /> <package pattern="Microsoft.Extensions.DependencyInjection.Abstractions" />
<package pattern="Microsoft.Extensions.Hosting" /> <package pattern="Microsoft.Extensions.Hosting" />
<package pattern="Microsoft.Extensions.Hosting.Abstractions" /> <package pattern="Microsoft.Extensions.Hosting.Abstractions" />
<package pattern="Microsoft.Extensions.Http" /> <package pattern="Microsoft.Extensions.Http" />
<package pattern="Microsoft.Extensions.Logging.Abstractions" /> <package pattern="Microsoft.Extensions.Logging.Abstractions" />
<package pattern="Microsoft.Extensions.Options" /> <package pattern="Microsoft.Extensions.Options" />
<package pattern="Microsoft.Extensions.Options.ConfigurationExtensions" /> <package pattern="Microsoft.Extensions.Options.ConfigurationExtensions" />
<package pattern="Microsoft.Data.Sqlite" /> <package pattern="Microsoft.Data.Sqlite" />
<package pattern="Microsoft.AspNetCore.Authentication.JwtBearer" /> <package pattern="Microsoft.IdentityModel.Logging" />
<package pattern="Microsoft.IdentityModel.Abstractions" />
<package pattern="Microsoft.AspNetCore.Authentication.JwtBearer" />
<package pattern="Google.Protobuf" /> <package pattern="Google.Protobuf" />
<package pattern="Grpc.*" /> <package pattern="Grpc.*" />
<package pattern="Microsoft.Bcl.AsyncInterfaces" /> <package pattern="Microsoft.Bcl.AsyncInterfaces" />

View File

@@ -1,95 +1,379 @@
# 3 · ProductVision — **StellaOps** # 3 · ProductVision — **StellaOps**
*(v1.3 · 12 Jul 2025 · supersedes v1.2; expanded with ecosystem integration, refined metrics, and alignment to emerging trends)*
## 1) Problem Statement & Goals
We ship containers. We need:
- **Authenticity & integrity** of build artifacts and metadata.
- **Provenance** attached to artifacts, not platforms.
- **Transparency** to detect tampering and retroactive edits.
- **Determinism & explainability** so scanner judgments can be replayed and justified.
- **Actionability** to separate theoretical from exploitable risk (VEX).
- **Minimal trust** across multi-tenant and third-party boundaries.
**Non-goals:** Building a new package manager, inventing new SBOM/attestation formats, or depending on closed standards.
--- ---
## 0Preamble ## 2) Golden Path (Minimal EndtoEnd Flow)
This Vision builds on the purpose and gap analysis defined in **01WHY**. ```mermaid
It paints a threeyear “northstar” picture of success for the opensource project and sets the measurable guardrails that every roadmap item must serve, while fostering ecosystem growth and adaptability to trends like SBOM mandates, AIassisted security **and transparent usage quotas**. flowchart LR
A[Source / Image / Rootfs] --> B[SBOM Producer\nCycloneDX 1.6]
B --> C[Signer\nintoto Attestation + DSSE]
C --> D[Transparency\nSigstore Rekor - optional but RECOMMENDED]
D --> E[Durable Storage\nSBOMs, Attestations, Proofs]
E --> F[Scanner\nPkg analyzers + Entrytrace + Layer cache]
F --> G[VEX Authoring\nOpenVEX + SPDX 3.0.1 relationships]
G --> H[Policy Gate\nOPA/Rego: allow/deny + waivers]
H --> I[Artifacts Store\nReports, SARIF, VEX, Audit log]
````
**Adopted standards (pinned for interoperability):**
* **SBOM:** CycloneDX **1.6** (JSON/XML)
* **Attestation & signing:** **intoto Attestations** (Statement + Predicate) in **DSSE** envelopes
* **Transparency:** **Sigstore Rekor** (inclusion proofs, monitoring)
* **Exploitability:** **OpenVEX** (statuses & justifications)
* **Modeling & interop:** **SPDX 3.0.1** (relationships / VEX modeling)
* **Findings interchange (optional):** SARIF for analyzer output
> Pinnings are *policy*, not claims about “latest”. We may update pins via normal change control.
--- ---
## 1NorthStar Vision Statement (2027) ## 3) Security Invariants (What MUST Always Hold)
> *By mid2027, StellaOps is the fastest, mosttrusted selfhosted SBOM scanner. Developers expect vulnerability feedback in **five seconds or less**—even while the free tier enforces a transparent **{{ quota_token }} scans/day** limit with graceful waiting. The project thrives on a vibrant plugin marketplace, weekly community releases, transparent governance, and seamless integrations with major CI/CD ecosystems—while never breaking the fivesecond promise.* 1. **Artifact identity is contentaddressed.**
* All identities are SHA256 digests of immutable blobs (images, SBOMs, attestations).
2. **Every SBOM is signed.**
* SBOMs MUST be wrapped in **intoto DSSE** attestations tied to the container digest.
3. **Provenance is attached, not implied.**
* Build metadata (who/where/how) MUST ride as attestations linked by digest.
4. **Transparency FIRST mindset.**
* Signatures/attestations SHOULD be logged to **Rekor** and store inclusion proofs.
5. **Determinism & replay.**
* Scans MUST be reproducible given: input digests, scanner version, DB snapshot, and config.
6. **Explainability.**
* Findings MUST show the *why*: package → file path → callstack / entrypoint (when available).
7. **Exploitability over enumeration.**
* Risk MUST be communicated via **VEX** (OpenVEX), including **under_investigation** where appropriate.
8. **Least privilege & minimal trust.**
* Build keys are short-lived; scanners run on ephemeral, least-privileged workers.
9. **Airgap friendly.**
* Mirrors for vuln DBs and containers; all verification MUST work without public egress.
10. **No hidden blockers.**
* Policy gates MUST be codereviewable (e.g., Rego) and auditable; waivers are attestations, not emails.
--- ---
## 2Outcomes & Success Metrics ## 4) Trust Boundaries & Roles
| KPI (communitycentric) | Baseline Jul2025 | Target Q22026 | NorthStar 2027 | <!-- ```mermaid
| -------------------------------- | ----------------- | -------------- | --------------- | flowchart TB
| ⭐Gitea /GitHub stars | 0 | 4000 | 10000 | subgraph DevTenant[Dev Tenant]
| Weekly active Docker pulls | 0 | 1500 | 4000 | SRC[Source Code]
| P95 SBOM scan time (alpine) | 5s | **5s** | **4s** | CI[CI Runner]
| Freetier scan satisfaction* | n/a | ≥90% | ≥95% | end
| Firsttimecontributor PRs /qtr | 0 | 15 | 30 | subgraph SecPlatform[Security Platform]
SB[SBOM Service]
AT[Attestation Service]
TR[Transparency Client]
SCN[Scanner Pool]
POL[Policy Gate]
ST[Artifacts Store]
end
subgraph External[External/3rdparty]
REG[Container Registry]
REK[Rekor]
end
\*Measured via anonymous telemetry *optin only*: ratio of successful scans to `429 QuotaExceeded` errors. SRC --> CI
CI -->|image digest| REG
REG -->|pull by digest| SB
SB --> AT --> TR --> REK
AT --> ST
REK --> ST
ST --> SCN --> POL --> ST
``` -->
* **Build/CI:** Holds signing capability (shortlived keys or keyless signing).
* **Registry:** Source of truth for image bytes; access via digest only.
* **Scanner Pool:** Ephemeral nodes; contentaddressed caches; no shared mutable state.
* **Artifacts Store:** Immutable, WORMlike storage for SBOMs, attestations, proofs, SARIF, VEX.
--- ---
## 3Strategic Pillars ## 5) Data & Evidence We Persist
1. **SpeedFirst** preserve the sub5s P95 walltime; any feature that hurts it must ship behind a toggle or plugin. **Quota throttling must apply a soft 5s delay first, so “speed first” remains true even at the limit.** | Artifact | MUST Persist | Why |
2. **OfflinebyDesign** every byte required to scan ships in public images; Internet access is optional. | -------------------- | ------------------------------------ | ---------------------------- |
3. **ModularForever** capabilities land as hotload plugins; the monolith can split without rewrites. | SBOM (CycloneDX 1.6) | Raw file + DSSE attestation | Reproducibility, audit |
4. **CommunityOwnership** ADRs and governance decisions live in public; new maintainers elected by meritocracy. | intoto Statement | Full JSON | Traceability |
5. **ZeroSurprise Upgrades & Limits** SemVer discipline; `main` is always installable; minor upgrades never break CI YAML **and freetier limits are clearly documented, with early UI warnings.** | Rekor entry | UUID + inclusion proof | Tamperevidence |
6. **Ecosystem Harmony** Prioritise integrations with popular OSS tools (e.g., Trivy extensions, BuildKit hooks) to lower adoption barriers. | Scanner output | SARIF + raw notes | Triage & tooling interop |
| VEX | OpenVEX + links to findings | Noise reduction & compliance |
| Policy decisions | Input set + decision + rule versions | Governance & forensics |
Retention follows our Compliance policy; default **≥ 18 months**.
--- ---
## 4Roadmap Themes (1824months) ## 6) Scanner Requirements (Determinism & Explainability)
| Horizon | Theme | Example EPIC | * **Inputs pinned:** image digest(s), SBOM(s), scanner version, vuln DB snapshot date, config hash.
| ------------------ | ----------------------- | ---------------------------------------------------------------------------------------------------------------------------------- | * **Explainability:** show file paths, package coords (e.g., purl), and—when possible—**entrytrace/callstack** from executable entrypoints to vulnerable symbol(s).
| **Q32025** (3mo) | **Core Stability & UX** | Onecommand installer; darkmode UI; baseline SBOM scanning; **Freetier Quota Service ({{ quota_token }} scans/day, early banner, waitwall).** | * **Caching:** contentaddressed perlayer & perecosystem caches; warming does not change decisions.
| 612mo | *Extensibility* | Scanservice microsplit PoC; community plugin marketplace beta. | * **Unknowns:** output **under_investigation** where exploitability is not yet known; roll into VEX.
| 1218mo | *Ecosystem* | Community plugin marketplace launch; integrations with Syft and Harbor. | * **Interchange:** emit **SARIF** for IDE and pipeline consumption (optional but recommended).
| 1824mo | *Resilience & Scale* | Redis Cluster autosharding; AIassisted triage plugin framework. |
*(Granular decomposition lives in 25_LEDGER.md.)
--- ---
## 5Stakeholder Personas & Benefits ## 7) Policy Gate (OPA/Rego) — Examples
| Persona | Core Benefit | > Gate runs after scan + VEX merge. It treats VEX as firstclass input.
| --------------------- | ---------------------------------------------------------------- |
| Solo OSS maintainer | Laptop scans in **5s**; zero cloud reliance. |
| CI Platform Engineer | Singlebinary backend + Redis; stable YAML integrations. |
| Security Auditor | AGPL code, traceable CVE sources, reproducible benchmarks. |
| Community Contributor | Plugin hooks and goodfirst issues; meritbased maintainer path. |
| Budgetconscious Lead | Clear **{{ quota_token }} scans/day** allowance before upgrades are required. |
(See **01WHY §3** for detailed painpoints & evidence.) ### 7.1 Deny unreconciled criticals that are exploitable
```rego
# Policy gate 7.1: block releases that carry an unreconciled, exploitable CRITICAL.
package stella.policy

# Deny by default; `allow` must be proven.
default allow := false

# A finding is exploitable when it is CRITICAL and VEX marks it "affected".
exploitable(v) {
v.severity == "CRITICAL"
v.exploitability == "affected"
}

# Allow only when no exploitable, un-waived finding exists.
allow {
not exploitable_some
}

# True when at least one finding is exploitable and not covered by a waiver.
exploitable_some {
some v in input.findings
exploitable(v)
not waived(v.id)
}

# A waiver is a VEX statement of "not_affected" with a non-empty justification
# (waivers are attestations, not emails — see invariant 10).
waived(id) {
some w in input.vex
w.vuln_id == id
w.status == "not_affected"
w.justification != ""
}
```
### 7.2 Require Rekor inclusion for attestations
```rego
# Policy gate 7.2: every attestation must carry a Rekor inclusion proof
# (transparency-first invariant; see §3.4).
package stella.policy

# Emits one violation message per attestation lacking an inclusion proof.
violation[msg] {
some a in input.attestations
not a.rekor.inclusion_proof
msg := sprintf("Attestation %s lacks Rekor inclusion proof", [a.id])
}
```
--- ---
## 6NonGoals (20252027) ## 8) Version Pins & Compatibility
* Multitenant SaaS offering. | Domain | Standard | Stella Pin | Notes |
* Automated “fix PR” generation. | ------------ | -------------- | ---------------- | ------------------------------------------------ |
* Proprietary compliance certifications (left to downstream distros). | SBOM | CycloneDX | **1.6** | JSON or XML accepted; JSON preferred |
* Windows **container** scanning (agents only). | Attestation | intoto | **Statement v1** | Predicates per use case (e.g., sbom, provenance) |
| Envelope | DSSE | **v1** | Canonical JSON payloads |
| Transparency | Sigstore Rekor | **API stable** | Inclusion proof stored alongside artifacts |
| VEX | OpenVEX | **spec current** | Map to SPDX 3.0.1 relationships as needed |
| Interop | SPDX | **3.0.1** | Use for modeling & crossecosystem exchange |
| Findings | SARIF | **2.1.0** | Optional but recommended |
--- ---
## 7Review & Change Process ## 9) Minimal CLI Playbook (Illustrative)
* **Cadence:** product owner leads a public Vision review every **2 sprints (≈1quarter)**. > Commands below are illustrative; wire them into CI with shortlived credentials.
* **Amendments:** material changes require PR labelled `type:vision` + two maintainer approvals.
* **Versioning:** bump patch for typo, minor for KPI tweak, major if NorthStar statement shifts. ```bash
* **Community Feedback:** Open GitHub Discussions for input; incorporate topvoted suggestions quarterly. # 1) Produce SBOM (CycloneDX 1.6) from image digest
# 1) Produce the SBOM (CycloneDX 1.6 JSON) from the image, addressed by digest — never by tag.
syft registry:5000/myimg@sha256:... -o cyclonedx-json > sbom.cdx.json
# 2) Create intoto DSSE attestation bound to the image digest
#    (the predicate type pins the Stella SBOM attestation schema).
cosign attest --predicate sbom.cdx.json \
--type https://stella-ops.org/attestations/sbom/1 \
--key env://COSIGN_KEY \
registry:5000/myimg@sha256:...
# 3) (Optional but recommended) Rekor transparency
#    Sign, then capture the inclusion proof so it can be archived with the artifact.
cosign sign --key env://COSIGN_KEY registry:5000/myimg@sha256:...
cosign verify-attestation --type ... --certificate-oidc-issuer https://token.actions... registry:5000/myimg@sha256:... > rekor-proof.json
# 4) Scan (pinned DB snapshot)
#    The snapshot date plus the image/SBOM digests make this run replayable.
stella-scan --image registry:5000/myimg@sha256:... \
--sbom sbom.cdx.json \
--db-snapshot 2025-10-01 \
--out findings.sarif
# 5) Emit VEX
stella-vex --from findings.sarif --policy vex-policy.yaml --out vex.json
# 6) Gate
#    Evaluates the Rego policy (see §7) against the merged scan + VEX input.
opa eval -i gate-input.json -d policy/ -f pretty "data.stella.policy.allow"
```
--- ---
## 10) JSON Skeletons (CopyReady)
### 10.1 intoto Statement (DSSE payload)
```json
{
"_type": "https://in-toto.io/Statement/v1",
"subject": [
{
"name": "registry:5000/myimg",
"digest": { "sha256": "IMAGE_DIGEST_SHA256" }
}
],
"predicateType": "https://stella-ops.org/attestations/sbom/1",
"predicate": {
"sbomFormat": "CycloneDX",
"sbomVersion": "1.6",
"mediaType": "application/vnd.cyclonedx+json",
"location": "sha256:SBOM_BLOB_SHA256"
}
}
```
### 10.2 DSSE Envelope (wrapping the Statement)
```json
{
"payloadType": "application/vnd.in-toto+json",
"payload": "BASE64URL_OF_CANONICAL_STATEMENT_JSON",
"signatures": [
{
"keyid": "KEY_ID_OR_CERT_ID",
"sig": "BASE64URL_SIGNATURE"
}
]
}
```
### 10.3 OpenVEX (compact)
```json
{
"@context": "https://openvex.dev/ns/v0.2.0",
"author": "Stella Ops Security",
"timestamp": "2025-10-29T00:00:00Z",
"statements": [
{
"vulnerability": "CVE-2025-0001",
      "products": ["pkg:generic/example@1.2.3?arch=amd64"],
"status": "under_investigation",
"justification": "analysis_ongoing",
"timestamp": "2025-10-29T00:00:00Z"
}
]
}
```
---
## 11) Handling “Unknowns” & Noise
* Use **OpenVEX** statuses: `affected`, `not_affected`, `fixed`, `under_investigation`.
* Prefer structured **justifications** over free-text notes.
* Timebound **waivers** are modeled as VEX with `not_affected` + justification or `affected` + compensating controls.
* Dashboards MUST surface counts separately for `under_investigation` so risk is visible.
---
## 12) Operational Guidance
**Key management**
* Use **ephemeral OIDC** or shortlived keys (HSM/KMS bound).
* Rotate signer identities at least quarterly; no shared longterm keys in CI.
**Caching & performance**
* Layer caches keyed by digest + analyzer version.
* Prewarm vuln DB snapshots; mirror into airgapped envs.
**Multitenancy**
* Strict tenant isolation for storage and compute.
* Ratelimit and bound memory/CPU per scan job.
**Auditing**
* Every decision is a record: inputs, versions, rule commit, actor, result.
* Preserve Rekor inclusion proofs with the attestation record.
---
## 13) Exceptions Process (Breakglass)
1. Open a tracked exception with: artifact digest, CVE(s), business justification, expiry.
2. Generate VEX entry reflecting the exception (`not_affected` with justification or `affected` with compensating controls).
3. Merge into policy inputs; **policy MUST read VEX**, not tickets.
4. Re-review before expiry; exceptions cannot auto-renew.
---
## 14) Threat Model (Abbreviated)
* **Tampering**: modified SBOMs/attestations → mitigated by DSSE + Rekor + WORM storage.
* **Confused deputy**: scanning a different image → mitigated by digestonly pulls and subject digests in attestations.
* **TOCTOU / retagging**: registry tags drift → mitigated by digest pinning everywhere.
* **Scanner poisoning**: unpinned DBs → mitigated by snapshotting and recording version/date.
* **Key compromise**: longlived CI keys → mitigated by OIDC keyless or shortlived KMS keys.
---
## 15) Implementation Checklist
* [ ] SBOM producer emits CycloneDX 1.6; bound to image digest.
* [ ] intoto+DSSE signing wired in CI; Rekor logging enabled.
* [ ] Durable artifact store with WORM semantics.
* [ ] Scanner produces explainable findings; SARIF optional.
* [ ] OpenVEX emitted and archived; linked to findings & image.
* [ ] Policy gate enforced; waivers modeled as VEX; decisions logged.
* [ ] Airgap mirrors for registry and vuln DBs.
* [ ] Runbooks for key rotation, Rekor outage, and database rollback.
---
## 16) Glossary
* **SBOM**: Software Bill of Materials describing packages/components within an artifact.
* **Attestation**: Signed statement binding facts (predicate) to a subject (artifact) using intoto.
* **DSSE**: Envelope that signs the canonical payload detached from transport.
* **Transparency Log**: Appendonly log (e.g., Rekor) giving inclusion and temporal proofs.
* **VEX**: Vulnerability Exploitability eXchange expressing exploitability status & justification.
---
## 8·Change Log ## 8·Change Log
| Version | Date | Note (highlevel) | | Version | Date | Note (highlevel) |
| ------- | ----------- | ----------------------------------------------------------------------------------------------------- | | ------- | ----------- | ----------------------------------------------------------------------------------------------------- |
| v1.4 | 29-Oct-2025 | Initial principles, golden path, policy examples, and JSON skeletons. |
| v1.4 | 14Jul2025 | First public revision reflecting quarterly roadmap & KPI baseline. | | v1.4 | 14Jul2025 | First public revision reflecting quarterly roadmap & KPI baseline. |
| v1.3 | 12Jul2025 | Expanded ecosystem pillar, added metrics/integrations, refined non-goals, community persona/feedback. | | v1.3 | 12Jul2025 | Expanded ecosystem pillar, added metrics/integrations, refined non-goals, community persona/feedback. |
| v1.2 | 11Jul2025 | Restructured to link with WHY; merged principles into StrategicPillars; added review §7 | | v1.2 | 11Jul2025 | Restructured to link with WHY; merged principles into StrategicPillars; added review §7 |

View File

@@ -0,0 +1,49 @@
# Cartographer Graph Handshake Plan
_Status: 2025-10-29_
## Why this exists
The Concelier/Excititor graph enrichment work (CONCELIER-GRAPH-21-001/002, EXCITITOR-GRAPH-21-001/002/005) and the merge-side coordination tasks (FEEDMERGE-COORD-02-901/902) are blocked on a clear contract with Cartographer and the Policy Engine. This document captures the minimum artefacts each guild owes so we can unblock the graph pipeline and resume implementation without re-scoping every stand-up.
## Deliverables by guild
### Cartographer Guild
- **CARTO-GRAPH-21-002** (Inspector contract): publish the inspector payload schema (`graph.inspect.v1`) including the fields Cartographer needs from Concelier/Excititor (SBOM relationships, advisory/VEX linkouts, justification summaries). Target format: shared Proto/JSON schema stored under `src/Cartographer/Contracts/`.
- **CARTO-GRAPH-21-005** (Inspector access patterns): document the query shapes Cartographer will execute (PURL → advisory, PURL → VEX statement, policy scope filters) so storage can project the right indexes/materialized views. Include sample `mongosh` queries and desired TTL/limit behaviour.
- Provide a test harness (e.g., Postman collection or integration fixture) Cartographer will use to validate the Concelier/Excititor endpoints once they land.
### Concelier Core Guild
- Derive adjacency data from SBOM normalization as described in CONCELIER-GRAPH-21-001 (depends on `CONCELIER-POLICY-20-002`). Once Cartographer publishes the schema above, implement:
- Node payloads: component metadata, scopes, entrypoint annotations.
- Edge payloads: `contains`, `depends_on`, `provides`, provenance array.
- **Change events (CONCELIER-GRAPH-21-002)**: define `sbom.relationship.changed` event contract with tenant + context metadata, referencing Cartographers filter requirements. Include event samples and replay instructions in `docs/graph/concelier-events.md`.
- Coordinate with Cartographer on pagination/streaming expectations (page size, continuation token, retention window).
### Excititor Core & Storage Guilds
- **Inspector linkouts (EXCITITOR-GRAPH-21-001)**: expose Batched VEX/advisory lookup endpoint that accepts graph node PURLs and responds with raw document slices + justification metadata. Ensure Policy Engine scope enrichment (EXCITITOR-POLICY-20-002) feeds this response so Cartographer does not need to call multiple services.
- **Overlay enrichment (EXCITITOR-GRAPH-21-002)**: align the overlay metadata with Cartographers schema once it lands (include justification summaries, document versions, and provenance).
- **Indexes/materialized views (EXCITITOR-GRAPH-21-005)**: after Cartographer publishes query shapes, create the necessary indexes (PURL + tenant, policy scope) and document migrations in storage runbooks. Provide load testing evidence before enabling in production.
### Policy Guild
- **CONCELIER-POLICY-20-002**: publish the enriched linkset schema that powers both Concelier and Excititor payloads. Include enumerations for relationship types and scope tags.
- Share the Policy Engine timeline for policy overlay metadata (`POLICY-ENGINE-30-001`) so Excititor can plan the overlay enrichment delivery.
## Shared action items
| Owner | Task | Deadline | Notes |
|-------|------|----------|-------|
| Cartographer | Publish inspector schema + query patterns (`CARTO-GRAPH-21-002`/`21-005`) | 2025-11-04 | Attach schema files + examples to this doc once merged. |
| Concelier Core | Draft change-event payload with sample JSON | 2025-11-06 | Blocked until Cartographer schema lands; prepare skeleton PR in `docs/graph/concelier-events.md`. |
| Excititor Core/Storage | Prototype batch linkout API + index design doc | 2025-11-07 | Leverage Cartographer query patterns to size indexes; include perf targets. |
| Policy Guild | Confirm linkset enrichment fields + overlay timeline | 2025-11-05 | Needed to unblock both Concelier enrichment and Excititor overlay tasks. |
## Reporting
- Track progress in the `#cartographer-handshake` Slack thread (create once Cartographer posts the schema MR).
- During the twice-weekly graph sync, review outstanding checklist items above and update the task notes (`TASKS.md`) so the backlog reflects real-time status.
- Once the schema and query contracts are merged, the Concelier/Excititor teams can flip their tasks from **BLOCKED** to **DOING** and attach implementation plans referencing this document.
## Appendix: references
- `CONCELIER-GRAPH-21-001`, `CONCELIER-GRAPH-21-002` (Concelier Core task board)
- `EXCITITOR-GRAPH-21-001`, `EXCITITOR-GRAPH-21-002`, `EXCITITOR-GRAPH-21-005` (Excititor Core/Storage task boards)
- `CARTO-GRAPH-21-002`, `CARTO-GRAPH-21-005` (Cartographer task board)
- `POLICY-ENGINE-30-001`, `CONCELIER-POLICY-20-002`, `EXCITITOR-POLICY-20-002` (Policy Engine roadmap)

View File

@@ -0,0 +1,37 @@
# Java Analyzer Observation Writer Plan
_Status: 2025-10-29_
SCANNER-ANALYZERS-JAVA-21-008 (resolver + AOC writer) is blocked by upstream heuristics that need to settle before we can emit observation JSON. This note itemises the remaining work so the analyzer guild can sequence delivery without re-opening design discussions in every stand-up.
## Prerequisite summary
- **SCANNER-ANALYZERS-JAVA-21-004** (reflection / dynamic loader heuristics) must emit normalized reflection edges with confidence + call-site metadata. Outstanding items: TCCL coverage for servlet containers and resource-based plugin hints. Owners: Java Analyzer Guild.
- **SCANNER-ANALYZERS-JAVA-21-005** (framework config extraction) required to surface Spring/Jakarta entrypoints that feed observation entrypoint metadata. Add YAML/property parsing fixtures and document reason codes (`config-spring`, `config-jaxrs`, etc.).
- **SCANNER-ANALYZERS-JAVA-21-006** (JNI/native hints) optional but highly recommended before observation writer so JNI edges land alongside static ones. Coordinate with native analyzer on reason codes.
- **Advisory core** ensure AOC writer schema (`JavaObservation.json`) is frozen before we serialise to avoid churn downstream.
## Deliverables for SCANNER-ANALYZERS-JAVA-21-008
1. **Observation projection (`JavaObservationWriter`)**
- Inputs: normalised workspace + analyzer outputs (classpath graph, SPI table, reflection edges, config hints, JNI hints).
- Outputs: deterministic JSON containing entrypoints, components, edges, warnings, provenance. Align with `docs/aoc/java-observation-schema.md` once published.
2. **AOC guard integration**
- Serialize observation documents through `Scanner.Aoc` guard pipeline; add unit tests covering required fields and forbidden derived data.
3. **Fixture updates**
- Expand `fixtures/lang/java/` set to include reflection-heavy app, Spring Boot sample, JNI sample, modular app. Record golden outputs with `UPDATE_JAVA_FIXTURES=1`.
4. **Metrics & logging**
- Emit counters (`scanner.java.observation.edges_total`, etc.) to trace observation completeness during CI runs.
5. **Documentation**
- Update `docs/scanner/java-analyzer.md` with reason code matrix and observation field definitions.
## Action items
| Owner | Task | Due | Notes |
|-------|------|-----|-------|
| Java Analyzer Guild | Land reflection TODOs (TCCL + resource plugin hints) | 2025-11-01 | Required for reliable dynamic edges. |
| Java Analyzer Guild | Finish config extractor for Spring/Jakarta | 2025-11-02 | Use sample apps in `fixtures/lang/java/config-*`. |
| Java Analyzer Guild | Draft observation writer spike PR using new schema | 2025-11-04 | PR can be draft but should include JSON schema + sample. |
| Scanner AOC Owners | Validate observation JSON against AOC guard + schema | 2025-11-05 | Blocker for marking 21-008 as DOING. |
| QA Guild | Prepare regression harness + performance gate (<300ms per fat jar) | 2025-11-06 | Align with SCANNER-ANALYZERS-JAVA-21-009. |
## Reporting
- Track these checkpoints in the Java analyzer weekly sync; once prerequisites are green, flip SCANNER-ANALYZERS-JAVA-21-008 to **DOING**.
- Store schema and sample output under `docs/scanner/java-observations/` so AOC reviewers have a stable reference.

View File

@@ -0,0 +1,94 @@
# Normalized Version Rule Recipes
_Status: 2025-10-29_
This guide captures the minimum wiring required for connectors and Merge coordination tasks to finish the normalized-version rollout that unblocks FEEDMERGE-COORD-02-9xx.
## 1. Quick-start checklist
1. Ensure your mapper already emits `AffectedPackage.VersionRanges` (SemVer, NEVRA, EVR). If you only have vendor/product strings, capture the raw range text before trimming so it can feed the helper.
2. Call `SemVerRangeRuleBuilder.BuildNormalizedRules(rawRange, patchedVersion, provenance)` for each range and place the result in `AffectedPackage.NormalizedVersions`.
3. Set a provenance note in the format `connector:{advisoryId}:{index}` so Merge can differentiate connector-provided rules from canonical fallbacks.
4. Verify with `dotnet test` that the connector snapshot fixtures now include the `normalizedVersions` array and update fixtures by setting the connector-specific `UPDATE_*_FIXTURES=1` environment variable.
5. Tail Merge logs (or the test output) for the new warning `Normalized version rules missing for {AdvisoryKey}`; an empty warning stream means the connector/merge artefacts are ready to close FEEDMERGE-COORD-02-901/902.
## 2. Code snippet: SemVer connector (CCCS/Cisco/ICS-CISA)
```csharp
using StellaOps.Concelier.Normalization.SemVer;
/// <summary>
/// Projects each affected entry of the advisory DTO into an
/// <see cref="AffectedPackage"/> carrying both the classic version-range
/// primitives and the normalized version rules required by the
/// FEEDMERGE-COORD-02-9xx rollout.
/// </summary>
/// <param name="dto">Connector advisory DTO containing the affected entries.</param>
/// <param name="recordedAt">Timestamp recorded on each provenance entry.</param>
/// <returns>One <see cref="AffectedPackage"/> per affected entry, in input order.</returns>
private static IReadOnlyList<AffectedPackage> BuildPackages(MyDto dto, DateTimeOffset recordedAt)
{
    var results = new List<AffectedPackage>();
    var index = 0;

    foreach (var affected in dto.AffectedEntries)
    {
        // Provenance note format "connector:{advisoryId}:{index}" lets Merge
        // distinguish connector-provided rules from canonical fallbacks (§1.3).
        var note = $"{MyConnectorPlugin.SourceName}:{dto.AdvisoryId}:{index}";
        var rawRange = affected.Range?.Trim();
        var fixedVersion = affected.FixedVersion;

        // Normalized rules feed AffectedPackage.NormalizedVersions (§1.2).
        var normalized = SemVerRangeRuleBuilder.BuildNormalizedRules(rawRange, fixedVersion, note);

        // Classic range primitives, projected with the same provenance note.
        var ranges = SemVerRangeRuleBuilder.Build(rawRange, fixedVersion, note)
            .Select(r => r.Primitive.ToAffectedVersionRange(note))
            .ToArray();

        results.Add(new AffectedPackage(
            AffectedPackageTypes.SemVer,
            affected.PackageId,
            versionRanges: ranges,
            normalizedVersions: normalized,
            provenance: new[]
            {
                new AdvisoryProvenance(
                    MyConnectorPlugin.SourceName,
                    "package",
                    affected.PackageId,
                    recordedAt,
                    new[] { ProvenanceFieldMasks.AffectedPackages })
            }));

        index++;
    }

    return results;
}
```
A few notes:
- If you already have `SemVerPrimitive` instances, call `.ToNormalizedVersionRule(provenance)` on each primitive instead of rebuilding from raw strings.
- Use `SemVerRangeRuleBuilder.BuildNormalizedRules` when the connector only tracks raw range text plus an optional fixed/patched version.
- For products that encode ranges like `"ExampleOS 4.12 - 4.14"`, run a small regex to peel off the version substring (see §3) and use the same provenance note when emitting the rule and the original range primitive.
## 3. Parsing helper for trailing version phrases
Many of the overdue connectors store affected products as natural-language phrases. The following helper normalises common patterns (`1.2 - 1.4`, `<= 3.5`, `Version 7.2 and later`).
```csharp
/// <summary>
/// Extracts a version-range fragment (e.g. "1.2 - 1.4", "&lt;= 3.5", "7.2") from a
/// natural-language product string so it can be passed to
/// <c>SemVerRangeRuleBuilder.BuildNormalizedRules</c>.
/// </summary>
/// <param name="productString">Free-form product description; may be null/blank.</param>
/// <returns>The trimmed range fragment, or <c>null</c> when none is present.</returns>
private static string? TryExtractRangeSuffix(string productString)
{
    if (string.IsNullOrWhiteSpace(productString))
    {
        return null;
    }

    // Verbatim string literal: the original non-verbatim form ("\s", "\d")
    // does not compile — \s and \d are not valid C# escape sequences.
    var match = Regex.Match(
        productString,
        @"(?<range>(?:<=?|>=?)?\s*\d+(?:\.\d+){0,2}(?:\s*-\s*\d+(?:\.\d+){0,2})?)",
        RegexOptions.CultureInvariant);

    return match.Success ? match.Groups["range"].Value.Trim() : null;
}
```
Once you extract the `range` fragment, feed it to `SemVerRangeRuleBuilder.BuildNormalizedRules(range, null, provenance)`. Keep the original product string as-is so operators can still see the descriptive text.
## 4. Merge dashboard hygiene
- Run `dotnet build src/Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj` after wiring a connector to confirm no warnings appear.
- Merge counter tag pairs to watch in Grafana/CI logs:
- `concelier.merge.normalized_rules{package_type="npm"}` increases once the connector emits normalized arrays.
- `concelier.merge.normalized_rules_missing{package_type="vendor"}` should trend to zero once rollout completes.
- The Merge service now logs `Normalized version rules missing for {AdvisoryKey}; sources=...; packageTypes=...` when a connector still needs to supply normalized rules. Use this as the acceptance gate for FEEDMERGE-COORD-02-901/902.
## 5. Documentation touchpoints
- Update the connector `TASKS.md` entry with the date you flipped on normalized rules and note the provenance format you chose.
- Record any locale-specific parsing (e.g., German `bis`) in the connector README so future contributors can regenerate fixtures confidently.
- When opening the PR, include `dotnet test` output covering the connector tests so reviewers see the normalized array diff.
Once each connector follows the steps above, we can mark FEEDCONN-CCCS-02-009, FEEDCONN-CISCO-02-009, FEEDCONN-CERTBUND-02-010, FEEDCONN-ICSCISA-02-012, and the FEEDMERGE-COORD-02-90x tasks as resolved.

View File

@@ -50,7 +50,7 @@
| `content.format` | string | Source format (`CSAF`, `OSV`, etc.). | | `content.format` | string | Source format (`CSAF`, `OSV`, etc.). |
| `content.spec_version` | string | Upstream spec version when known. | | `content.spec_version` | string | Upstream spec version when known. |
| `content.raw` | object | Full upstream payload, untouched except for transport normalisation. | | `content.raw` | object | Full upstream payload, untouched except for transport normalisation. |
| `identifiers` | object | Normalised identifiers (`cve`, `ghsa`, `aliases`, etc.) derived losslessly from raw content. | | `identifiers` | object | Upstream identifiers (`cve`, `ghsa`, `aliases`, etc.) captured as provided (trimmed, order preserved, duplicates allowed). |
| `linkset` | object | Join hints (see section 4.3). | | `linkset` | object | Join hints (see section 4.3). |
| `supersedes` | string or null | Points to previous revision of same upstream doc when content hash changes. | | `supersedes` | string or null | Points to previous revision of same upstream doc when content hash changes. |
@@ -77,8 +77,9 @@
- `reconciled_from`: Provenance of linkset entries (JSON Pointer or field origin) to make automated checks auditable. - `reconciled_from`: Provenance of linkset entries (JSON Pointer or field origin) to make automated checks auditable.
Canonicalisation rules: Canonicalisation rules:
- Package URLs are rendered in canonical form without qualifiers/subpaths (`pkg:type/namespace/name@version`). - Package URLs are rendered in canonical form without qualifiers/subpaths (`pkg:type/namespace/name@version`).
- CPE values are normalised to the 2.3 binding (`cpe:2.3:part:vendor:product:version:*:*:*:*:*:*:*`). - CPE values are normalised to the 2.3 binding (`cpe:2.3:part:vendor:product:version:*:*:*:*:*:*:*`).
- Connector mapping stages are responsible for the canonical form; ingestion trims whitespace but otherwise preserves the original order and duplicate entries so downstream policy can reason about upstream intent.
### 4.4 `advisory_observations` ### 4.4 `advisory_observations`
@@ -99,10 +100,10 @@ Canonicalisation rules:
| `content.format` / `content.specVersion` | string | Raw payload format metadata (CSAF, OSV, JSON, etc.). | | `content.format` / `content.specVersion` | string | Raw payload format metadata (CSAF, OSV, JSON, etc.). |
| `content.raw` | object | Full upstream document stored losslessly (Relaxed Extended JSON). | | `content.raw` | object | Full upstream document stored losslessly (Relaxed Extended JSON). |
| `content.metadata` | object | Optional connector-specific metadata (batch ids, hints). | | `content.metadata` | object | Optional connector-specific metadata (batch ids, hints). |
| `linkset.aliases` | array | Normalized aliases (lower-case, sorted). | | `linkset.aliases` | array | Connector-supplied aliases (trimmed, order preserved, duplicates allowed). |
| `linkset.purls` | array | Normalized PURLs extracted from the document. | | `linkset.purls` | array | Connector-supplied PURLs (ingestion preserves order and duplicates). |
| `linkset.cpes` | array | Normalized CPE URIs. | | `linkset.cpes` | array | Connector-supplied CPE URIs (trimmed, order preserved). |
| `linkset.references` | array | `{ type, url }` pairs (type lower-case). | | `linkset.references` | array | `{ type, url }` pairs (trimmed; ingestion preserves order). |
| `createdAt` | datetime | Timestamp when Concelier persisted the observation. | | `createdAt` | datetime | Timestamp when Concelier persisted the observation. |
| `attributes` | object | Optional provenance attributes keyed by connector. | | `attributes` | object | Optional provenance attributes keyed by connector. |

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Regenerates the Red Hat connector golden snapshots by running its test
# project with UPDATE_GOLDENS enabled.
set -euo pipefail

root="$(git rev-parse --show-toplevel)"
test_project="$root/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj"

if [[ ! -f "$test_project" ]]; then
  echo "Red Hat connector test project not found at $test_project" >&2
  exit 1
fi

export UPDATE_GOLDENS=1

# Disable shared Concelier test infra so the test project can restore standalone packages.
dotnet test "$test_project" -p:UseConcelierTestInfra=false "$@"

View File

@@ -1,6 +1,7 @@
using System; using System;
using System.Diagnostics; using System.Diagnostics;
using System.Globalization; using System.Globalization;
using System.IO;
using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http; using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Configuration;
@@ -28,16 +29,17 @@ using StellaOps.Authority.Storage.Mongo.Initialization;
using StellaOps.Authority.Storage.Mongo.Stores; using StellaOps.Authority.Storage.Mongo.Stores;
using StellaOps.Authority.RateLimiting; using StellaOps.Authority.RateLimiting;
using StellaOps.Configuration; using StellaOps.Configuration;
using StellaOps.Plugin.DependencyInjection; using StellaOps.Plugin.DependencyInjection;
using StellaOps.Plugin.Hosting; using StellaOps.Plugin.Hosting;
using StellaOps.Authority.OpenIddict.Handlers; using StellaOps.Authority.OpenIddict.Handlers;
using System.Linq; using System.Linq;
using StellaOps.Cryptography.Audit; using StellaOps.Cryptography.Audit;
using StellaOps.Cryptography.DependencyInjection; using StellaOps.Cryptography.DependencyInjection;
using StellaOps.Authority.Permalinks; using StellaOps.Authority.Permalinks;
using StellaOps.Authority.Revocation; using StellaOps.Authority.Revocation;
using StellaOps.Authority.Signing; using StellaOps.Authority.Signing;
using StellaOps.Cryptography; using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
using StellaOps.Authority.Storage.Mongo.Documents; using StellaOps.Authority.Storage.Mongo.Documents;
using StellaOps.Authority.Security; using StellaOps.Authority.Security;
using StellaOps.Auth.Abstractions; using StellaOps.Auth.Abstractions;
@@ -162,15 +164,36 @@ else
builder.Services.AddScoped<ValidateDpopProofHandler>(); builder.Services.AddScoped<ValidateDpopProofHandler>();
#endif #endif
builder.Services.AddRateLimiter(rateLimiterOptions => builder.Services.AddRateLimiter(rateLimiterOptions =>
{ {
AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions);
}); });
builder.Services.AddStellaOpsCrypto(); var requiresKms = string.Equals(authorityOptions.Signing.KeySource, "kms", StringComparison.OrdinalIgnoreCase)
builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>()); || authorityOptions.Signing.AdditionalKeys.Any(k => string.Equals(k.Source, "kms", StringComparison.OrdinalIgnoreCase));
builder.Services.AddSingleton<AuthoritySigningKeyManager>();
builder.Services.AddSingleton<VulnPermalinkService>(); if (requiresKms)
{
if (string.IsNullOrWhiteSpace(authorityOptions.Signing.KeyPassphrase))
{
throw new InvalidOperationException("Authority signing with source 'kms' requires signing.keyPassphrase to be configured.");
}
var kmsRoot = Path.Combine(builder.Environment.ContentRootPath, "kms");
builder.Services.AddFileKms(options =>
{
options.RootPath = kmsRoot;
options.Password = authorityOptions.Signing.KeyPassphrase!;
options.Algorithm = authorityOptions.Signing.Algorithm;
});
builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, KmsAuthoritySigningKeySource>());
}
builder.Services.AddStellaOpsCrypto();
builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton<IAuthoritySigningKeySource, FileAuthoritySigningKeySource>());
builder.Services.AddSingleton<AuthoritySigningKeyManager>();
builder.Services.AddSingleton<VulnPermalinkService>();
AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader
.Load(authorityOptions, builder.Environment.ContentRootPath) .Load(authorityOptions, builder.Environment.ContentRootPath)

View File

@@ -0,0 +1,59 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;

namespace StellaOps.Authority.Signing;

/// <summary>
/// Loads Authority signing keys whose material lives in the KMS
/// (<c>source == "kms"</c>), exporting the EC private parameters on demand.
/// </summary>
internal sealed class KmsAuthoritySigningKeySource : IAuthoritySigningKeySource
{
    private readonly IKmsClient _kmsClient;

    public KmsAuthoritySigningKeySource(IKmsClient kmsClient)
        => _kmsClient = kmsClient ?? throw new ArgumentNullException(nameof(kmsClient));

    /// <summary>Returns <c>true</c> only for the "kms" key source (case-insensitive).</summary>
    public bool CanLoad(string source)
        => string.Equals(source, "kms", StringComparison.OrdinalIgnoreCase);

    /// <summary>
    /// Resolves the KMS key named by <c>request.Location</c> (optionally pinned to a
    /// version via the <see cref="KmsMetadataKeys.Version"/> metadata entry) and
    /// materialises it as a <see cref="CryptoSigningKey"/>.
    /// </summary>
    /// <param name="request">Signing key request; must carry source "kms" and a key identifier in Location.</param>
    /// <returns>The signing key with its KMS version recorded in metadata.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="request"/> is null.</exception>
    /// <exception cref="InvalidOperationException">When the source is not "kms" or no key identifier is configured.</exception>
    public CryptoSigningKey Load(AuthoritySigningKeyRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (!CanLoad(request.Source))
        {
            throw new InvalidOperationException($"KMS signing key source cannot load '{request.Source}'.");
        }

        var keyId = (request.Location ?? string.Empty).Trim();
        if (string.IsNullOrWhiteSpace(keyId))
        {
            throw new InvalidOperationException("KMS signing keys require signing.keyPath/location to specify the key identifier.");
        }

        // Pre-initialize so the local is definitely assigned even when
        // AdditionalMetadata is null and the conditional call short-circuits.
        string? versionId = null;
        request.AdditionalMetadata?.TryGetValue(KmsMetadataKeys.Version, out versionId);

        // IAuthoritySigningKeySource.Load is synchronous, so we block on the async
        // export here rather than change the interface.
        var material = _kmsClient.ExportAsync(keyId, versionId).GetAwaiter().GetResult();

        // NOTE(review): curve is fixed to NIST P-256 regardless of material.Algorithm;
        // confirm the KMS never exports keys on a different curve before extending.
        var parameters = new ECParameters
        {
            Curve = ECCurve.NamedCurves.nistP256,
            D = material.D.ToArray(),
            Q = new ECPoint
            {
                X = material.Qx.ToArray(),
                Y = material.Qy.ToArray(),
            },
        };

        var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
        {
            // Surface the concrete KMS key version so operators can audit rotation.
            [KmsMetadataKeys.Version] = material.VersionId,
        };

        var reference = new CryptoKeyReference(request.KeyId, request.Provider);
        return new CryptoSigningKey(reference, material.Algorithm, in parameters, material.CreatedAt, request.ExpiresAt, metadata: metadata);
    }

    /// <summary>Metadata keys recognised on KMS-backed signing key requests.</summary>
    internal static class KmsMetadataKeys
    {
        public const string Version = "kms.version";
    }
}

View File

@@ -28,6 +28,7 @@
<ProjectReference Include="..\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> <ProjectReference Include="..\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" /> <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography.Kms/StellaOps.Cryptography.Kms.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> <ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
</ItemGroup> </ItemGroup>
@@ -36,4 +37,4 @@
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content> </Content>
</ItemGroup> </ItemGroup>
</Project> </Project>

View File

@@ -9,4 +9,4 @@
|FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**DONE (2025-10-15)** Added `CccsDiagnostics` meter (fetch/parse/map counters), enriched connector logs with document counts, and published `docs/ops/concelier-cccs-operations.md` covering config, telemetry, and sanitiser guidance.| |FEEDCONN-CCCS-02-006 Observability & documentation|DevEx|Docs|**DONE (2025-10-15)** Added `CccsDiagnostics` meter (fetch/parse/map counters), enriched connector logs with document counts, and published `docs/ops/concelier-cccs-operations.md` covering config, telemetry, and sanitiser guidance.|
|FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**DONE (2025-10-15)** Measured `/api/cccs/threats/v1/get` inventory (~5.1k rows/lang; earliest 2018-06-08), documented backfill workflow + language split strategy, and linked the runbook for Offline Kit execution.| |FEEDCONN-CCCS-02-007 Historical advisory harvesting plan|BE-Conn-CCCS|Research|**DONE (2025-10-15)** Measured `/api/cccs/threats/v1/get` inventory (~5.1k rows/lang; earliest 2018-06-08), documented backfill workflow + language split strategy, and linked the runbook for Offline Kit execution.|
|FEEDCONN-CCCS-02-008 Raw DOM parsing refinement|BE-Conn-CCCS|Source.Common|**DONE (2025-10-15)** Parser now walks unsanitised DOM (heading + nested list coverage), sanitizer keeps `<h#>`/`section` nodes, and regression fixtures/tests assert EN/FR list handling + preserved HTML structure.| |FEEDCONN-CCCS-02-008 Raw DOM parsing refinement|BE-Conn-CCCS|Source.Common|**DONE (2025-10-15)** Parser now walks unsanitised DOM (heading + nested list coverage), sanitizer keeps `<h#>`/`section` nodes, and regression fixtures/tests assert EN/FR list handling + preserved HTML structure.|
|FEEDCONN-CCCS-02-009 Normalized versions rollout (Oct 2025)|BE-Conn-CCCS|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Implement trailing-version split helper per Merge guidance (see `../Merge/RANGE_PRIMITIVES_COORDINATION.md` “Helper snippets”) to emit `NormalizedVersions` via `SemVerRangeRuleBuilder`; refresh mapper tests/fixtures to assert provenance notes (`cccs:{serial}:{index}`) and confirm merge counters drop.| |FEEDCONN-CCCS-02-009 Normalized versions rollout (Oct 2025)|BE-Conn-CCCS|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Implement trailing-version split helper per Merge guidance (see `../Merge/RANGE_PRIMITIVES_COORDINATION.md` “Helper snippets”) to emit `NormalizedVersions` via `SemVerRangeRuleBuilder`; refresh mapper tests/fixtures to assert provenance notes (`cccs:{serial}:{index}`) and confirm merge counters drop.<br>2025-10-29: See `docs/dev/normalized-rule-recipes.md` for ready-made helper + regex snippet; wire into `BuildPackages` and update fixtures with `UPDATE_CCCS_FIXTURES=1`.|

View File

@@ -10,4 +10,4 @@
|FEEDCONN-CERTBUND-02-007 Feed history & locale assessment|BE-Conn-CERTBUND|Research|**DONE (2025-10-15)** Measured RSS retention (~6days/≈250 items), captured connector-driven backfill guidance in the runbook, and aligned locale guidance (preserve `language=de`, Docs glossary follow-up). **Next:** coordinate with Tools to land the state-seeding helper so scripted backfills replace manual Mongo tweaks.| |FEEDCONN-CERTBUND-02-007 Feed history & locale assessment|BE-Conn-CERTBUND|Research|**DONE (2025-10-15)** Measured RSS retention (~6days/≈250 items), captured connector-driven backfill guidance in the runbook, and aligned locale guidance (preserve `language=de`, Docs glossary follow-up). **Next:** coordinate with Tools to land the state-seeding helper so scripted backfills replace manual Mongo tweaks.|
|FEEDCONN-CERTBUND-02-008 Session bootstrap & cookie strategy|BE-Conn-CERTBUND|Source.Common|**DONE (2025-10-14)** Feed client primes the portal session (cookie container via `SocketsHttpHandler`), shares cookies across detail requests, and documents bootstrap behaviour in options (`PortalBootstrapUri`).| |FEEDCONN-CERTBUND-02-008 Session bootstrap & cookie strategy|BE-Conn-CERTBUND|Source.Common|**DONE (2025-10-14)** Feed client primes the portal session (cookie container via `SocketsHttpHandler`), shares cookies across detail requests, and documents bootstrap behaviour in options (`PortalBootstrapUri`).|
|FEEDCONN-CERTBUND-02-009 Offline Kit export packaging|BE-Conn-CERTBUND, Docs|Offline Kit|**DONE (2025-10-17)** Added `tools/certbund_offline_snapshot.py` to capture search/export JSON, emit deterministic manifests + SHA files, and refreshed docs (`docs/ops/concelier-certbund-operations.md`, `docs/24_OFFLINE_KIT.md`) with offline-kit instructions and manifest layout guidance. Seed data README/ignore rules cover local snapshot hygiene.| |FEEDCONN-CERTBUND-02-009 Offline Kit export packaging|BE-Conn-CERTBUND, Docs|Offline Kit|**DONE (2025-10-17)** Added `tools/certbund_offline_snapshot.py` to capture search/export JSON, emit deterministic manifests + SHA files, and refreshed docs (`docs/ops/concelier-certbund-operations.md`, `docs/24_OFFLINE_KIT.md`) with offline-kit instructions and manifest layout guidance. Seed data README/ignore rules cover local snapshot hygiene.|
|FEEDCONN-CERTBUND-02-010 Normalized range translator|BE-Conn-CERTBUND|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-22)** Translate `product.Versions` phrases (e.g., `2023.1 bis 2024.2`, `alle`) into comparator strings for `SemVerRangeRuleBuilder`, emit `NormalizedVersions` with `certbund:{advisoryId}:{vendor}` provenance, and extend tests/README with localisation notes.| |FEEDCONN-CERTBUND-02-010 Normalized range translator|BE-Conn-CERTBUND|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-22)** Translate `product.Versions` phrases (e.g., `2023.1 bis 2024.2`, `alle`) into comparator strings for `SemVerRangeRuleBuilder`, emit `NormalizedVersions` with `certbund:{advisoryId}:{vendor}` provenance, and extend tests/README with localisation notes.<br>2025-10-29: `docs/dev/normalized-rule-recipes.md` §3 includes regex starter for German “bis” phrases—integrate into mapper and refresh fixtures via `UPDATE_CERTBUND_FIXTURES=1`.|

View File

@@ -13,4 +13,4 @@
|Express unaffected/investigation statuses without overloading range fields|BE-Conn-RH|Models|**DONE** Introduced AffectedPackageStatus collection and updated mapper/tests.| |Express unaffected/investigation statuses without overloading range fields|BE-Conn-RH|Models|**DONE** Introduced AffectedPackageStatus collection and updated mapper/tests.|
|Reference dedupe & ordering in mapper|BE-Conn-RH|Models|DONE mapper consolidates by URL, merges metadata, deterministic ordering validated in tests.| |Reference dedupe & ordering in mapper|BE-Conn-RH|Models|DONE mapper consolidates by URL, merges metadata, deterministic ordering validated in tests.|
|Hydra summary fetch through SourceFetchService|BE-Conn-RH|Source.Common|DONE summary pages now fetched via SourceFetchService with cache + conditional headers.| |Hydra summary fetch through SourceFetchService|BE-Conn-RH|Source.Common|DONE summary pages now fetched via SourceFetchService with cache + conditional headers.|
|Fixture validation sweep|QA|None|**DOING (2025-10-19)** Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.| |Fixture validation sweep|QA|None|**DOING (2025-10-19)** Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.<br>2025-10-29: Added `scripts/update-redhat-fixtures.sh` to regenerate golden snapshots with `UPDATE_GOLDENS=1`; run it before reviews to capture CSAF contract deltas.|

View File

@@ -12,4 +12,4 @@
|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** GovDelivery onboarding runbook captured in `docs/ops/concelier-icscisa-operations.md`; secret vault path and Offline Kit handling documented.| |FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** GovDelivery onboarding runbook captured in `docs/ops/concelier-icscisa-operations.md`; secret vault path and Offline Kit handling documented.|
|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).| |FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).|
|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.| |FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.|
|FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.| |FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.<br>2025-10-29: Follow `docs/dev/normalized-rule-recipes.md` §2 to call `ToNormalizedVersionRule` and ensure mixed firmware strings log a Models ticket when regex extraction fails.|

View File

@@ -9,4 +9,4 @@
|FEEDCONN-CISCO-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** Cisco diagnostics counters exposed and ops runbook updated with telemetry guidance (`docs/ops/concelier-cisco-operations.md`).| |FEEDCONN-CISCO-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** Cisco diagnostics counters exposed and ops runbook updated with telemetry guidance (`docs/ops/concelier-cisco-operations.md`).|
|FEEDCONN-CISCO-02-007 API selection decision memo|BE-Conn-Cisco|Research|**DONE (2025-10-11)** Drafted decision matrix: openVuln (structured/delta filters, OAuth throttle) vs RSS (delayed/minimal metadata). Pending OAuth onboarding (`FEEDCONN-CISCO-02-008`) before final recommendation circulated.| |FEEDCONN-CISCO-02-007 API selection decision memo|BE-Conn-Cisco|Research|**DONE (2025-10-11)** Drafted decision matrix: openVuln (structured/delta filters, OAuth throttle) vs RSS (delayed/minimal metadata). Pending OAuth onboarding (`FEEDCONN-CISCO-02-008`) before final recommendation circulated.|
|FEEDCONN-CISCO-02-008 OAuth client provisioning|Ops, BE-Conn-Cisco|Ops|**DONE (2025-10-14)** `docs/ops/concelier-cisco-operations.md` documents OAuth provisioning/rotation, quotas, and Offline Kit distribution guidance.| |FEEDCONN-CISCO-02-008 OAuth client provisioning|Ops, BE-Conn-Cisco|Ops|**DONE (2025-10-14)** `docs/ops/concelier-cisco-operations.md` documents OAuth provisioning/rotation, quotas, and Offline Kit distribution guidance.|
|FEEDCONN-CISCO-02-009 Normalized SemVer promotion|BE-Conn-Cisco|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Use helper from `../Merge/RANGE_PRIMITIVES_COORDINATION.md` to convert `SemVerPrimitive` outputs into `NormalizedVersionRule` with provenance (`cisco:{productId}`), update mapper/tests, and confirm merge normalized-rule counters drop.| |FEEDCONN-CISCO-02-009 Normalized SemVer promotion|BE-Conn-Cisco|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-21)** Use helper from `../Merge/RANGE_PRIMITIVES_COORDINATION.md` to convert `SemVerPrimitive` outputs into `NormalizedVersionRule` with provenance (`cisco:{productId}`), update mapper/tests, and confirm merge normalized-rule counters drop.<br>2025-10-29: Follow `docs/dev/normalized-rule-recipes.md` §2 to convert existing primitives (`CiscoMapper`) and document provenance in fixtures (`UPDATE_CISCO_FIXTURES=1`).|

View File

@@ -336,46 +336,67 @@ internal sealed class AdvisoryRawService : IAdvisoryRawService
string.IsNullOrWhiteSpace(content.Encoding) ? null : content.Encoding.Trim()); string.IsNullOrWhiteSpace(content.Encoding) ? null : content.Encoding.Trim());
} }
private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers) private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers)
{ {
var normalizedAliases = identifiers.Aliases var aliases = identifiers.Aliases;
.Where(static alias => !string.IsNullOrWhiteSpace(alias)) if (!aliases.IsDefaultOrEmpty)
.Select(static alias => alias.Trim()) {
.Distinct(StringComparer.OrdinalIgnoreCase) var builder = ImmutableArray.CreateBuilder<string>(aliases.Length);
.ToImmutableArray(); foreach (var alias in aliases)
{
return new RawIdentifiers( if (string.IsNullOrWhiteSpace(alias))
normalizedAliases, {
identifiers.PrimaryId?.Trim() ?? string.Empty); continue;
} }
private static RawLinkset NormalizeLinkset(RawLinkset linkset) builder.Add(alias.Trim());
{ }
return new RawLinkset
{ aliases = builder.ToImmutable();
Aliases = NormalizeStringArray(linkset.Aliases, StringComparer.OrdinalIgnoreCase), }
PackageUrls = NormalizeStringArray(linkset.PackageUrls, StringComparer.Ordinal), else
Cpes = NormalizeStringArray(linkset.Cpes, StringComparer.Ordinal), {
References = NormalizeReferences(linkset.References), aliases = ImmutableArray<string>.Empty;
ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom, StringComparer.Ordinal), }
Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty,
}; return new RawIdentifiers(
} aliases,
identifiers.PrimaryId?.Trim() ?? string.Empty);
private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values, StringComparer comparer) }
{
if (values.IsDefaultOrEmpty) private static RawLinkset NormalizeLinkset(RawLinkset linkset)
{ {
return EmptyArray; return new RawLinkset
} {
Aliases = NormalizeStringArray(linkset.Aliases),
return values PackageUrls = NormalizeStringArray(linkset.PackageUrls),
.Where(static value => !string.IsNullOrWhiteSpace(value)) Cpes = NormalizeStringArray(linkset.Cpes),
.Select(static value => value.Trim()) References = NormalizeReferences(linkset.References),
.Distinct(comparer) ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom),
.OrderBy(static value => value, comparer) Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty,
.ToImmutableArray(); };
} }
private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values)
{
if (values.IsDefaultOrEmpty)
{
return ImmutableArray<string>.Empty;
}
var builder = ImmutableArray.CreateBuilder<string>(values.Length);
foreach (var value in values)
{
if (string.IsNullOrWhiteSpace(value))
{
continue;
}
builder.Add(value.Trim());
}
return builder.ToImmutable();
}
private static ImmutableArray<RawReference> NormalizeReferences(ImmutableArray<RawReference> references) private static ImmutableArray<RawReference> NormalizeReferences(ImmutableArray<RawReference> references)
{ {

View File

@@ -12,8 +12,9 @@
| CONCELIER-CORE-AOC-19-003 `Idempotent append-only upsert` | DONE (2025-10-28) | Concelier Core Guild | CONCELIER-STORE-AOC-19-002 | Implement idempotent upsert path using `(vendor, upstreamId, contentHash, tenant)` key, emitting supersedes pointers for new revisions and preventing duplicate inserts. | | CONCELIER-CORE-AOC-19-003 `Idempotent append-only upsert` | DONE (2025-10-28) | Concelier Core Guild | CONCELIER-STORE-AOC-19-002 | Implement idempotent upsert path using `(vendor, upstreamId, contentHash, tenant)` key, emitting supersedes pointers for new revisions and preventing duplicate inserts. |
> 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics. > 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics.
> Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure. > Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure.
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. | | CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. |
> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout. > Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout.
> 2025-10-29: `AdvisoryRawService` now preserves upstream alias/linkset ordering (trim-only) and updated AOC documentation reflects the behaviour; follow-up to ensure policy consumers handle duplicates remains open.
| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. | | CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. |
## Policy Engine v2 ## Policy Engine v2
@@ -27,10 +28,12 @@
| ID | Status | Owner(s) | Depends on | Notes | | ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------| |----|--------|----------|------------|-------|
| CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. | | CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. |
> 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically. > 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically.
| CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. | > 2025-10-29: Cross-guild handshake captured in `docs/dev/cartographer-graph-handshake.md`; begin drafting enrichment plan once Cartographer ships the inspector schema/query patterns.
> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending. | CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. |
> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending.
> 2025-10-29: Action item from handshake doc — prepare sample `sbom.relationship.changed` payload + replay notes once schema lands; coordinate with Scheduler for queue semantics.
## Link-Not-Merge v1 ## Link-Not-Merge v1

View File

@@ -92,6 +92,7 @@ Until these blocks land, connectors should stage changes behind a feature flag o
## Tracking & follow-up ## Tracking & follow-up
- Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900. - Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900.
- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`). - Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`).
- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge. - Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge.
- Precedence merge emits `Normalized version rules missing` warnings (source + package type) whenever we encounter ranges without normalized output—watch CI/staging logs for those signals to prioritise backlog fixes.
- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots. - When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots.
- If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing. - If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing.

View File

@@ -162,7 +162,7 @@ public sealed class AdvisoryPrecedenceMerger
.ToArray(); .ToArray();
var packageResult = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages)); var packageResult = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages));
RecordNormalizedRuleMetrics(packageResult.Packages); RecordNormalizedRuleMetrics(advisoryKey, packageResult.Packages);
var affectedPackages = packageResult.Packages; var affectedPackages = packageResult.Packages;
var cvssMetrics = ordered var cvssMetrics = ordered
.SelectMany(entry => entry.Advisory.CvssMetrics) .SelectMany(entry => entry.Advisory.CvssMetrics)
@@ -217,13 +217,16 @@ public sealed class AdvisoryPrecedenceMerger
return new PrecedenceMergeResult(merged, conflicts); return new PrecedenceMergeResult(merged, conflicts);
} }
private static void RecordNormalizedRuleMetrics(IReadOnlyList<AffectedPackage> packages) private void RecordNormalizedRuleMetrics(string advisoryKey, IReadOnlyList<AffectedPackage> packages)
{ {
if (packages.Count == 0) if (packages.Count == 0)
{ {
return; return;
} }
var missingSources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var missingPackageTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var package in packages) foreach (var package in packages)
{ {
var packageType = package.Type ?? string.Empty; var packageType = package.Type ?? string.Empty;
@@ -249,8 +252,41 @@ public sealed class AdvisoryPrecedenceMerger
}; };
MissingNormalizedRuleCounter.Add(1, tags); MissingNormalizedRuleCounter.Add(1, tags);
if (package.Provenance.Length > 0)
{
foreach (var provenance in package.Provenance)
{
if (string.IsNullOrWhiteSpace(provenance.Source))
{
continue;
}
if (!string.Equals(provenance.Source, "merge", StringComparison.OrdinalIgnoreCase))
{
missingSources.Add(provenance.Source);
}
}
}
if (!string.IsNullOrWhiteSpace(packageType))
{
missingPackageTypes.Add(packageType);
}
} }
} }
if (missingSources.Count > 0)
{
var sources = string.Join(",", missingSources.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase));
var packageTypes = string.Join(",", missingPackageTypes.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase));
_logger.LogWarning(
"Normalized version rules missing for {AdvisoryKey}; sources={Sources}; packageTypes={PackageTypes}",
advisoryKey,
sources,
packageTypes);
}
} }
private string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector) private string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector)

View File

@@ -15,14 +15,14 @@
|FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.| |FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.|
|Override audit logging|BE-Merge|Observability|DONE override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.| |Override audit logging|BE-Merge|Observability|DONE override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.|
|Configurable precedence table|BE-Merge|Architecture|DONE precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.| |Configurable precedence table|BE-Merge|Architecture|DONE precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.| |Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON 
now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-29: Added merge-time warnings highlighting sources/package types when ranges emit without normalized rules to accelerate backlog triage.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.| |Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, 
`Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.|
|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.| |Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.|
|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.| |Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.|
|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.| |FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.|
> Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests. > Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests.
|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.| |FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.<br>2025-10-29: Merge now emits `Normalized version rules missing...` warnings (see `docs/dev/normalized-rule-recipes.md` §4); include zero-warning excerpt plus Grafana counter snapshot when closing this task.|
|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.| |FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.<br>2025-10-29: Recipes doc (§2§3) outlines SemVer promotion + fallback logging—attach decision summary + log sample when handing off to Models.|
|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.| |FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.|
## Link-Not-Merge v1 Transition ## Link-Not-Merge v1 Transition

View File

@@ -0,0 +1,212 @@
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Driver;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common.State;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
namespace StellaOps.Concelier.Connector.Common.Tests;
/// <summary>
/// Integration tests for <c>SourceStateSeedProcessor</c> backed by an embedded Mongo2Go instance.
/// Verifies that seeding persists raw documents (GridFS payload, headers, metadata), updates the
/// source-state cursor (pending queues, known advisories, timestamps), and that re-seeding an
/// existing document replaces its raw payload without leaving the previous GridFS blob behind.
/// </summary>
public sealed class SourceStateSeedProcessorTests : IAsyncLifetime
{
    private readonly MongoDbRunner _runner;
    private readonly MongoClient _client;
    private readonly IMongoDatabase _database;
    private readonly DocumentStore _documentStore;
    private readonly RawDocumentStorage _rawStorage;
    private readonly MongoSourceStateRepository _stateRepository;
    private readonly FakeTimeProvider _timeProvider;

    public SourceStateSeedProcessorTests()
    {
        // NOTE(review): single-node replica set presumably required by the storage layer
        // (transactions / change streams) — confirm against RawDocumentStorage requirements.
        _runner = MongoDbRunner.Start(singleNodeReplSet: true);
        _client = new MongoClient(_runner.ConnectionString);
        // A unique database name per test-class instance isolates parallel test runs.
        _database = _client.GetDatabase($"source-state-seed-{Guid.NewGuid():N}");
        _documentStore = new DocumentStore(_database, NullLogger<DocumentStore>.Instance);
        _rawStorage = new RawDocumentStorage(_database);
        _stateRepository = new MongoSourceStateRepository(_database, NullLogger<MongoSourceStateRepository>.Instance);
        // Fixed clock keeps cursor timestamps deterministic and directly assertable.
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 28, 12, 0, 0, TimeSpan.Zero));
    }

    [Fact]
    public async Task ProcessAsync_PersistsDocumentsAndUpdatesCursor()
    {
        var processor = CreateProcessor();
        var documentId = Guid.NewGuid();
        var specification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-1",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-1\"}"),
                    ContentType = "application/json",
                    Headers = new Dictionary<string, string> { ["X-Test"] = "true" },
                    Metadata = new Dictionary<string, string> { ["test.meta"] = "value" },
                    FetchedAt = _timeProvider.GetUtcNow().AddMinutes(-5),
                    AddToPendingDocuments = true,
                    AddToPendingMappings = true,
                    KnownIdentifiers = new[] { "ADV-1" },
                }
            },
            Cursor = new SourceStateSeedCursor
            {
                LastModifiedCursor = _timeProvider.GetUtcNow().AddDays(-1),
                LastFetchAt = _timeProvider.GetUtcNow().AddMinutes(-10),
                Additional = new Dictionary<string, string> { ["custom"] = "value" },
            },
            KnownAdvisories = new[] { "ADV-0" },
        };

        var result = await processor.ProcessAsync(specification, CancellationToken.None);

        // Result surface: one document processed, queued for both parse and map stages,
        // and known advisories merged from the spec-level and document-level identifier sets.
        Assert.Equal(1, result.DocumentsProcessed);
        Assert.Single(result.PendingDocumentIds);
        Assert.Contains(documentId, result.PendingDocumentIds);
        Assert.Single(result.PendingMappingIds);
        Assert.Contains(documentId, result.PendingMappingIds);
        Assert.Equal(2, result.KnownAdvisoriesAdded.Count);
        Assert.Contains("ADV-0", result.KnownAdvisoriesAdded);
        Assert.Contains("ADV-1", result.KnownAdvisoriesAdded);
        Assert.Equal(_timeProvider.GetUtcNow(), result.CompletedAt);

        // Stored document: status, content type, headers/metadata round-trip, GridFS payload present.
        var storedDocument = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-1",
            CancellationToken.None);
        Assert.NotNull(storedDocument);
        Assert.Equal(documentId, storedDocument!.Id);
        Assert.Equal("application/json", storedDocument.ContentType);
        Assert.Equal(DocumentStatuses.PendingParse, storedDocument.Status);
        Assert.NotNull(storedDocument.GridFsId);
        Assert.NotNull(storedDocument.Headers);
        Assert.Equal("true", storedDocument.Headers!["X-Test"]);
        Assert.NotNull(storedDocument.Metadata);
        Assert.Equal("value", storedDocument.Metadata!["test.meta"]);

        // Exactly one raw payload lands in GridFS.
        var filesCollection = _database.GetCollection<BsonDocument>("documents.files");
        var fileCount = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
        Assert.Equal(1, fileCount);

        // Source-state cursor: pending queues, known advisories, seed timestamp, and
        // pass-through of the "Additional" custom cursor field.
        var state = await _stateRepository.TryGetAsync("vndr.test", CancellationToken.None);
        Assert.NotNull(state);
        Assert.Equal(_timeProvider.GetUtcNow().UtcDateTime, state!.LastSuccess);
        var cursor = state.Cursor;
        var pendingDocs = cursor["pendingDocuments"].AsBsonArray.Select(v => Guid.Parse(v.AsString)).ToList();
        Assert.Contains(documentId, pendingDocs);
        var pendingMappings = cursor["pendingMappings"].AsBsonArray.Select(v => Guid.Parse(v.AsString)).ToList();
        Assert.Contains(documentId, pendingMappings);
        var knownAdvisories = cursor["knownAdvisories"].AsBsonArray.Select(v => v.AsString).ToList();
        Assert.Contains("ADV-0", knownAdvisories);
        Assert.Contains("ADV-1", knownAdvisories);
        Assert.Equal(_timeProvider.GetUtcNow().UtcDateTime, cursor["lastSeededAt"].ToUniversalTime());
        Assert.Equal("value", cursor["custom"].AsString);
    }

    [Fact]
    public async Task ProcessAsync_ReplacesExistingDocumentAndCleansPreviousRawPayload()
    {
        var processor = CreateProcessor();
        var documentId = Guid.NewGuid();
        var initialSpecification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-2",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-2\",\"rev\":1}"),
                    ContentType = "application/json",
                    AddToPendingDocuments = true,
                }
            },
            KnownAdvisories = new[] { "ADV-2" },
        };

        await processor.ProcessAsync(initialSpecification, CancellationToken.None);

        var existingRecord = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-2",
            CancellationToken.None);
        Assert.NotNull(existingRecord);
        var previousGridId = existingRecord!.GridFsId;
        Assert.NotNull(previousGridId);

        var filesCollection = _database.GetCollection<BsonDocument>("documents.files");
        var initialFiles = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Single(initialFiles);

        // Re-seed the same document id/URI with a new payload revision.
        var updatedSpecification = new SourceStateSeedSpecification
        {
            Source = "vndr.test",
            Documents = new[]
            {
                new SourceStateSeedDocument
                {
                    DocumentId = documentId,
                    Uri = "https://example.test/advisories/ADV-2",
                    Content = Encoding.UTF8.GetBytes("{\"id\":\"ADV-2\",\"rev\":2}"),
                    ContentType = "application/json",
                    AddToPendingDocuments = true,
                }
            }
        };

        var secondResult = await processor.ProcessAsync(updatedSpecification, CancellationToken.None);

        Assert.Equal(1, secondResult.DocumentsProcessed);
        Assert.Empty(secondResult.PendingDocumentIds); // already present in cursor from the first seeding pass
        Assert.Empty(secondResult.PendingMappingIds);

        // Same logical document, but the raw payload was re-uploaded under a new GridFS id...
        var refreshedRecord = await _documentStore.FindBySourceAndUriAsync(
            "vndr.test",
            "https://example.test/advisories/ADV-2",
            CancellationToken.None);
        Assert.NotNull(refreshedRecord);
        Assert.Equal(documentId, refreshedRecord!.Id);
        Assert.NotNull(refreshedRecord.GridFsId);
        Assert.NotEqual(previousGridId, refreshedRecord.GridFsId);

        // ...and the stale blob was removed, leaving exactly one file behind.
        var files = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Single(files);
        Assert.NotEqual(previousGridId, files[0]["_id"].AsObjectId);
    }

    // Central factory so every test wires the processor against the same fixture state.
    private SourceStateSeedProcessor CreateProcessor()
        => new(
            _documentStore,
            _rawStorage,
            _stateRepository,
            _timeProvider,
            NullLogger<SourceStateSeedProcessor>.Instance);

    public Task InitializeAsync() => Task.CompletedTask;

    public async Task DisposeAsync()
    {
        try
        {
            await _client.DropDatabaseAsync(_database.DatabaseNamespace.DatabaseName);
        }
        finally
        {
            // Always tear down the embedded mongod, even if the database drop throws;
            // otherwise a failed cleanup leaks an orphaned Mongo2Go process on CI agents.
            _runner.Dispose();
        }
    }
}

View File

@@ -4,8 +4,21 @@
<TargetFramework>net10.0</TargetFramework> <TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings> <ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable> <Nullable>enable</Nullable>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup> </PropertyGroup>
<ItemGroup> <ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
</ItemGroup> </ItemGroup>
</Project> <ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -34,21 +34,41 @@ public sealed class AdvisoryRawServiceTests
} }
[Fact] [Fact]
public async Task IngestAsync_PropagatesRepositoryDuplicateResult() public async Task IngestAsync_PropagatesRepositoryDuplicateResult()
{ {
var repository = new RecordingRepository(); var repository = new RecordingRepository();
var service = CreateService(repository); var service = CreateService(repository);
var existingDocument = CreateDocument(); var existingDocument = CreateDocument();
var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument)); var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument));
repository.NextResult = expectedResult; repository.NextResult = expectedResult;
var result = await service.IngestAsync(CreateDocument(), CancellationToken.None); var result = await service.IngestAsync(CreateDocument(), CancellationToken.None);
Assert.False(result.Inserted); Assert.False(result.Inserted);
Assert.Same(expectedResult.Record, result.Record); Assert.Same(expectedResult.Record, result.Record);
} }
[Fact]
public async Task IngestAsync_PreservesAliasOrderAndDuplicates()
{
var repository = new RecordingRepository();
var service = CreateService(repository);
var aliasSeries = ImmutableArray.Create("CVE-2025-0001", "CVE-2025-0001", "GHSA-xxxx", "cve-2025-0001");
var document = CreateDocument() with
{
Identifiers = new RawIdentifiers(aliasSeries, "GHSA-xxxx"),
};
repository.NextResult = new AdvisoryRawUpsertResult(true, CreateRecord(document));
await service.IngestAsync(document, CancellationToken.None);
Assert.NotNull(repository.CapturedDocument);
Assert.Equal(aliasSeries, repository.CapturedDocument!.Identifiers.Aliases);
}
private static AdvisoryRawService CreateService(RecordingRepository repository) private static AdvisoryRawService CreateService(RecordingRepository repository)
{ {
var writeGuard = new AdvisoryRawWriteGuard(new AocWriteGuard()); var writeGuard = new AdvisoryRawWriteGuard(new AocWriteGuard());

View File

@@ -20,10 +20,12 @@
| ID | Status | Owner(s) | Depends on | Notes | | ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------| |----|--------|----------|------------|-------|
| EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. | | EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. |
> 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target. > 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target.
| EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. | > 2025-10-29: Handshake actions in `docs/dev/cartographer-graph-handshake.md` — draft batch linkout API skeleton + fixture plan once Cartographer delivers query patterns.
> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented. | EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. |
> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented.
> 2025-10-29: Align overlay schema work with the handshake doc once Policy Guild publishes the overlay additions; collect sample payloads for review.
## Link-Not-Merge v1 ## Link-Not-Merge v1

View File

@@ -17,8 +17,9 @@
| ID | Status | Owner(s) | Depends on | Notes | | ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------| |----|--------|----------|------------|-------|
| EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. | | EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. |
> 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined. > 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined.
> 2025-10-29: Per `docs/dev/cartographer-graph-handshake.md`, prepare index sizing doc once Cartographer shares query shapes; include perf targets + migration plan before unblocking.
## Link-Not-Merge v1 ## Link-Not-Merge v1

View File

@@ -26,6 +26,7 @@
| SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003, SCANNER-ANALYZERS-JAVA-21-004, SCANNER-ANALYZERS-JAVA-21-005 | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Observation JSON for fixtures deterministic; includes entrypoints, edges, warnings; passes AOC compliance lint. | | SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003, SCANNER-ANALYZERS-JAVA-21-004, SCANNER-ANALYZERS-JAVA-21-005 | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Observation JSON for fixtures deterministic; includes entrypoints, edges, warnings; passes AOC compliance lint. |
| SCANNER-ANALYZERS-JAVA-21-009 | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-008 | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Fixture suite committed under `fixtures/lang/java/ep`; determinism + benchmark gates (<300ms fat jar) configured in CI. | | SCANNER-ANALYZERS-JAVA-21-009 | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-008 | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Fixture suite committed under `fixtures/lang/java/ep`; determinism + benchmark gates (<300ms fat jar) configured in CI. |
| SCANNER-ANALYZERS-JAVA-21-010 | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-008 | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. | Runtime harness produces scrubbed events for sample app; edges merge with static output; docs describe sandbox & privacy. | | SCANNER-ANALYZERS-JAVA-21-010 | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-008 | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. | Runtime harness produces scrubbed events for sample app; edges merge with static output; docs describe sandbox & privacy. |
| SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. | | SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. |
> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria. > 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria.
> 2025-10-29 — See `docs/dev/java-analyzer-observation-plan.md` for prerequisite checklist and target dates; unblock once reflection/config/JNI tasks land and observation schema is frozen.

View File

@@ -0,0 +1,593 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// File-backed KMS implementation that stores ES256 key material on disk,
/// encrypted at rest with AES-GCM under a PBKDF2-derived key.
/// </summary>
/// <remarks>
/// Layout: <c>{RootPath}/{keyId}/metadata.json</c> plus one <c>{versionId}.key.json</c>
/// envelope per key version. All operations are serialized through a single semaphore,
/// so the client is safe for concurrent use within one process; it does not coordinate
/// access across processes.
/// </remarks>
public sealed class FileKmsClient : IKmsClient, IDisposable
{
    // Envelope-encryption parameter sizes (bytes).
    private const int SaltSizeBytes = 16;
    private const int NonceSizeBytes = 12;
    private const int TagSizeBytes = 16;
    private const int DerivedKeySizeBytes = 32;

    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        Converters =
        {
            new JsonStringEnumConverter(),
        },
    };

    private readonly FileKmsOptions _options;

    // Serializes every metadata/key-file read and write. SemaphoreSlim (not lock)
    // because the guarded sections await file I/O.
    private readonly SemaphoreSlim _mutex = new(1, 1);

    /// <summary>
    /// Creates a client rooted at <see cref="FileKmsOptions.RootPath"/>, creating the
    /// root directory if it does not exist.
    /// </summary>
    /// <exception cref="ArgumentException">Root path or password is missing.</exception>
    public FileKmsClient(FileKmsOptions options)
    {
        ArgumentNullException.ThrowIfNull(options);
        if (string.IsNullOrWhiteSpace(options.RootPath))
        {
            throw new ArgumentException("Root path must be provided.", nameof(options));
        }

        if (string.IsNullOrWhiteSpace(options.Password))
        {
            throw new ArgumentException("Password must be provided.", nameof(options));
        }

        _options = options;
        Directory.CreateDirectory(_options.RootPath);
    }

    /// <summary>
    /// Signs <paramref name="data"/> (hashed with SHA-256) with the requested key version,
    /// or the active version when <paramref name="keyVersion"/> is null.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Key does not exist, is revoked, or the resolved version is not active.
    /// </exception>
    public async Task<KmsSignResult> SignAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        if (data.IsEmpty)
        {
            throw new ArgumentException("Data cannot be empty.", nameof(data));
        }

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            if (record.State == KmsKeyState.Revoked)
            {
                throw new InvalidOperationException($"Key '{keyId}' is revoked and cannot be used for signing.");
            }

            var version = ResolveVersion(record, keyVersion);
            if (version.State != KmsKeyState.Active)
            {
                throw new InvalidOperationException($"Key version '{version.VersionId}' is not active. Current state: {version.State}");
            }

            var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false);
            var signature = SignData(privateKey, data.Span);
            return new KmsSignResult(record.KeyId, version.VersionId, record.Algorithm, signature);
        }
        finally
        {
            _mutex.Release();
        }
    }

    /// <summary>
    /// Verifies <paramref name="signature"/> over <paramref name="data"/> using the stored
    /// public key. Returns false for unknown keys, empty inputs, or missing public material.
    /// </summary>
    /// <remarks>
    /// NOTE(review): an explicitly requested version id that does not exist still throws
    /// (via <see cref="ResolveVersion"/>) rather than returning false — preserved as-is;
    /// confirm this is the intended contract.
    /// </remarks>
    public async Task<bool> VerifyAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        ReadOnlyMemory<byte> signature,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        if (data.IsEmpty || signature.IsEmpty)
        {
            return false;
        }

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false);
            if (record is null)
            {
                return false;
            }

            var version = ResolveVersion(record, keyVersion);
            if (string.IsNullOrWhiteSpace(version.PublicKey))
            {
                return false;
            }

            return VerifyData(version.CurveName, version.PublicKey, data.Span, signature.Span);
        }
        finally
        {
            _mutex.Release();
        }
    }

    /// <summary>Returns metadata for the key and all of its versions.</summary>
    /// <exception cref="InvalidOperationException">Key does not exist.</exception>
    public async Task<KmsKeyMetadata> GetMetadataAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");
            return ToMetadata(record);
        }
        finally
        {
            _mutex.Release();
        }
    }

    /// <summary>
    /// Exports the decrypted key material (including the private scalar) for the requested
    /// version. Callers are responsible for zeroing the returned buffers.
    /// </summary>
    /// <exception cref="InvalidOperationException">Key/version missing or lacks public material.</exception>
    public async Task<KmsKeyMaterial> ExportAsync(string keyId, string? keyVersion, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            var version = ResolveVersion(record, keyVersion);
            if (string.IsNullOrWhiteSpace(version.PublicKey))
            {
                throw new InvalidOperationException($"Key '{keyId}' version '{version.VersionId}' does not have public key material.");
            }

            var privateKey = await LoadPrivateKeyAsync(record, version, cancellationToken).ConfigureAwait(false);
            return new KmsKeyMaterial(
                record.KeyId,
                version.VersionId,
                record.Algorithm,
                version.CurveName,
                Convert.FromBase64String(privateKey.D),
                Convert.FromBase64String(privateKey.Qx),
                Convert.FromBase64String(privateKey.Qy),
                version.CreatedAt);
        }
        finally
        {
            _mutex.Release();
        }
    }

    /// <summary>
    /// Creates a new active version for the key (creating the key itself if missing),
    /// demoting any previously active versions to <see cref="KmsKeyState.PendingRotation"/>.
    /// </summary>
    /// <exception cref="InvalidOperationException">Key has been revoked.</exception>
    public async Task<KmsKeyMetadata> RotateAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: true).ConfigureAwait(false)
                ?? throw new InvalidOperationException("Failed to create or load key metadata.");

            if (record.State == KmsKeyState.Revoked)
            {
                throw new InvalidOperationException($"Key '{keyId}' has been revoked and cannot be rotated.");
            }

            var timestamp = DateTimeOffset.UtcNow;
            // 'T' and 'Z' are literal characters here; timestamps come from UtcNow so the
            // literal Z suffix is truthful. Millisecond precision also makes version ids unique.
            var versionId = $"{timestamp:yyyyMMddTHHmmssfffZ}";
            var keyData = CreateKeyMaterial(record.Algorithm);
            try
            {
                var envelope = EncryptPrivateKey(keyData.PrivateBlob);
                var fileName = $"{versionId}.key.json";
                var keyPath = Path.Combine(GetKeyDirectory(keyId), fileName);

                // Key file is written before metadata; if SaveMetadataAsync fails the
                // orphaned file is harmless (never referenced by metadata).
                await WriteJsonAsync(keyPath, envelope, cancellationToken).ConfigureAwait(false);

                foreach (var existing in record.Versions.Where(v => v.State == KmsKeyState.Active))
                {
                    existing.State = KmsKeyState.PendingRotation;
                }

                record.Versions.Add(new KeyVersionRecord
                {
                    VersionId = versionId,
                    State = KmsKeyState.Active,
                    CreatedAt = timestamp,
                    PublicKey = keyData.PublicKey,
                    CurveName = keyData.Curve,
                    FileName = fileName,
                });

                record.CreatedAt ??= timestamp;
                record.State = KmsKeyState.Active;
                record.ActiveVersion = versionId;

                await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
                return ToMetadata(record);
            }
            finally
            {
                CryptographicOperations.ZeroMemory(keyData.PrivateBlob);
            }
        }
        finally
        {
            _mutex.Release();
        }
    }

    /// <summary>Revokes the key and every non-revoked version; irreversible.</summary>
    /// <exception cref="InvalidOperationException">Key does not exist.</exception>
    public async Task RevokeAsync(string keyId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);

        await _mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var record = await LoadOrCreateMetadataAsync(keyId, cancellationToken, createIfMissing: false).ConfigureAwait(false)
                ?? throw new InvalidOperationException($"Key '{keyId}' does not exist.");

            var timestamp = DateTimeOffset.UtcNow;
            record.State = KmsKeyState.Revoked;
            foreach (var version in record.Versions)
            {
                if (version.State != KmsKeyState.Revoked)
                {
                    version.State = KmsKeyState.Revoked;
                    version.DeactivatedAt = timestamp;
                }
            }

            await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
        }
        finally
        {
            _mutex.Release();
        }
    }

    private static string GetMetadataPath(string root, string keyId)
        => Path.Combine(root, keyId, "metadata.json");

    // Returns the per-key directory, creating it on first use.
    private string GetKeyDirectory(string keyId)
    {
        var path = Path.Combine(_options.RootPath, keyId);
        Directory.CreateDirectory(path);
        return path;
    }

    /// <summary>
    /// Loads key metadata from disk, optionally creating a fresh (version-less) record.
    /// Returns null when the key is missing and <paramref name="createIfMissing"/> is false.
    /// Back-fills legacy records that lack algorithm/curve fields.
    /// </summary>
    private async Task<KeyMetadataRecord?> LoadOrCreateMetadataAsync(
        string keyId,
        CancellationToken cancellationToken,
        bool createIfMissing)
    {
        var metadataPath = GetMetadataPath(_options.RootPath, keyId);
        if (!File.Exists(metadataPath))
        {
            if (!createIfMissing)
            {
                return null;
            }

            var record = new KeyMetadataRecord
            {
                KeyId = keyId,
                Algorithm = _options.Algorithm,
                State = KmsKeyState.Active,
                CreatedAt = DateTimeOffset.UtcNow,
            };

            await SaveMetadataAsync(record, cancellationToken).ConfigureAwait(false);
            return record;
        }

        await using var stream = File.Open(metadataPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        var loadedRecord = await JsonSerializer.DeserializeAsync<KeyMetadataRecord>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        if (loadedRecord is null)
        {
            return null;
        }

        if (string.IsNullOrWhiteSpace(loadedRecord.Algorithm))
        {
            loadedRecord.Algorithm = KmsAlgorithms.Es256;
        }

        foreach (var version in loadedRecord.Versions)
        {
            if (string.IsNullOrWhiteSpace(version.CurveName))
            {
                version.CurveName = "nistP256";
            }
        }

        return loadedRecord;
    }

    // Atomic-enough for the single-process guarantee: the write happens under _mutex.
    private async Task SaveMetadataAsync(KeyMetadataRecord record, CancellationToken cancellationToken)
    {
        var metadataPath = GetMetadataPath(_options.RootPath, record.KeyId);
        Directory.CreateDirectory(Path.GetDirectoryName(metadataPath)!);
        await using var stream = File.Open(metadataPath, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, record, JsonOptions, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Loads and decrypts the per-version private key envelope.</summary>
    private async Task<EcdsaPrivateKeyRecord> LoadPrivateKeyAsync(KeyMetadataRecord record, KeyVersionRecord version, CancellationToken cancellationToken)
    {
        var keyPath = Path.Combine(GetKeyDirectory(record.KeyId), version.FileName);
        if (!File.Exists(keyPath))
        {
            throw new InvalidOperationException($"Key material for version '{version.VersionId}' was not found.");
        }

        await using var stream = File.Open(keyPath, FileMode.Open, FileAccess.Read, FileShare.Read);
        var envelope = await JsonSerializer.DeserializeAsync<KeyEnvelope>(stream, JsonOptions, cancellationToken).ConfigureAwait(false)
            ?? throw new InvalidOperationException("Key envelope could not be deserialized.");

        var payload = DecryptPrivateKey(envelope);
        try
        {
            return JsonSerializer.Deserialize<EcdsaPrivateKeyRecord>(payload, JsonOptions)
                ?? throw new InvalidOperationException("Key payload could not be deserialized.");
        }
        finally
        {
            CryptographicOperations.ZeroMemory(payload);
        }
    }

    /// <summary>
    /// Resolution order: explicit version id (throws when unknown) → record's ActiveVersion
    /// pointer → newest version whose state is Active. Throws when nothing resolves.
    /// </summary>
    private static KeyVersionRecord ResolveVersion(KeyMetadataRecord record, string? keyVersion)
    {
        KeyVersionRecord? version = null;
        if (!string.IsNullOrWhiteSpace(keyVersion))
        {
            version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, keyVersion, StringComparison.Ordinal));
            if (version is null)
            {
                throw new InvalidOperationException($"Key version '{keyVersion}' does not exist for key '{record.KeyId}'.");
            }
        }
        else if (!string.IsNullOrWhiteSpace(record.ActiveVersion))
        {
            version = record.Versions.SingleOrDefault(v => string.Equals(v.VersionId, record.ActiveVersion, StringComparison.Ordinal));
        }

        version ??= record.Versions
            .Where(v => v.State == KmsKeyState.Active)
            .OrderByDescending(v => v.CreatedAt)
            .FirstOrDefault();

        if (version is null)
        {
            throw new InvalidOperationException($"Key '{record.KeyId}' does not have an active version.");
        }

        return version;
    }

    /// <summary>
    /// Generates a fresh P-256 key pair and packages it as a serialized private blob
    /// plus a base64(Qx||Qy) public key string.
    /// </summary>
    private EcdsaKeyData CreateKeyMaterial(string algorithm)
    {
        if (!string.Equals(algorithm, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase))
        {
            throw new NotSupportedException($"Algorithm '{algorithm}' is not supported by the file KMS driver.");
        }

        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var parameters = ecdsa.ExportParameters(true);
        var keyRecord = new EcdsaPrivateKeyRecord
        {
            Curve = "nistP256",
            D = Convert.ToBase64String(parameters.D ?? Array.Empty<byte>()),
            Qx = Convert.ToBase64String(parameters.Q.X ?? Array.Empty<byte>()),
            Qy = Convert.ToBase64String(parameters.Q.Y ?? Array.Empty<byte>()),
        };

        var privateBlob = JsonSerializer.SerializeToUtf8Bytes(keyRecord, JsonOptions);

        // Public key is the raw concatenation of the X and Y coordinates.
        var qx = parameters.Q.X ?? Array.Empty<byte>();
        var qy = parameters.Q.Y ?? Array.Empty<byte>();
        var publicKey = new byte[qx.Length + qy.Length];
        Buffer.BlockCopy(qx, 0, publicKey, 0, qx.Length);
        Buffer.BlockCopy(qy, 0, publicKey, qx.Length, qy.Length);

        return new EcdsaKeyData(privateBlob, Convert.ToBase64String(publicKey), keyRecord.Curve);
    }

    // ECDSA over SHA-256 of the payload (ES256).
    private byte[] SignData(EcdsaPrivateKeyRecord privateKey, ReadOnlySpan<byte> data)
    {
        var parameters = new ECParameters
        {
            Curve = ResolveCurve(privateKey.Curve),
            D = Convert.FromBase64String(privateKey.D),
            Q = new ECPoint
            {
                X = Convert.FromBase64String(privateKey.Qx),
                Y = Convert.FromBase64String(privateKey.Qy),
            },
        };

        using var ecdsa = ECDsa.Create();
        ecdsa.ImportParameters(parameters);
        // Span overload avoids the defensive ToArray() copy of the payload.
        return ecdsa.SignData(data, HashAlgorithmName.SHA256);
    }

    private bool VerifyData(string curveName, string publicKeyBase64, ReadOnlySpan<byte> data, ReadOnlySpan<byte> signature)
    {
        var publicKey = Convert.FromBase64String(publicKeyBase64);
        // Public key is Qx||Qy; both halves must be the same length.
        if (publicKey.Length % 2 != 0)
        {
            return false;
        }

        var half = publicKey.Length / 2;
        var parameters = new ECParameters
        {
            Curve = ResolveCurve(curveName),
            Q = new ECPoint
            {
                X = publicKey[..half],
                Y = publicKey[half..],
            },
        };

        using var ecdsa = ECDsa.Create();
        ecdsa.ImportParameters(parameters);
        return ecdsa.VerifyData(data, signature, HashAlgorithmName.SHA256);
    }

    /// <summary>
    /// AES-GCM-encrypts the serialized private key under a fresh salt/nonce, with the
    /// key derived from the configured password via PBKDF2.
    /// </summary>
    private KeyEnvelope EncryptPrivateKey(ReadOnlySpan<byte> privateKey)
    {
        var salt = RandomNumberGenerator.GetBytes(SaltSizeBytes);
        var nonce = RandomNumberGenerator.GetBytes(NonceSizeBytes);
        var key = DeriveKey(salt);
        try
        {
            var ciphertext = new byte[privateKey.Length];
            var tag = new byte[TagSizeBytes];

            // AesGcm is instance-based; the tag-size constructor (.NET 8+) avoids the
            // obsolete tag-size-less overload (SYSLIB0053). Encrypt takes the plaintext
            // span directly — no intermediate copy needed.
            using (var aes = new AesGcm(key, TagSizeBytes))
            {
                aes.Encrypt(nonce, privateKey, ciphertext, tag);
            }

            return new KeyEnvelope(
                Ciphertext: Convert.ToBase64String(ciphertext),
                Nonce: Convert.ToBase64String(nonce),
                Tag: Convert.ToBase64String(tag),
                Salt: Convert.ToBase64String(salt));
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key);
        }
    }

    /// <summary>Reverses <see cref="EncryptPrivateKey"/>; caller must zero the returned plaintext.</summary>
    private byte[] DecryptPrivateKey(KeyEnvelope envelope)
    {
        var salt = Convert.FromBase64String(envelope.Salt);
        var nonce = Convert.FromBase64String(envelope.Nonce);
        var tag = Convert.FromBase64String(envelope.Tag);
        var ciphertext = Convert.FromBase64String(envelope.Ciphertext);
        var key = DeriveKey(salt);
        try
        {
            var plaintext = new byte[ciphertext.Length];
            using (var aes = new AesGcm(key, TagSizeBytes))
            {
                aes.Decrypt(nonce, ciphertext, tag, plaintext);
            }

            return plaintext;
        }
        finally
        {
            CryptographicOperations.ZeroMemory(key);
        }
    }

    /// <summary>
    /// Derives the AES key from the configured password via PBKDF2/SHA-256.
    /// The destination-span overload writes straight into the result buffer, so there is
    /// no intermediate key array to copy and zero. Caller zeros the returned key.
    /// </summary>
    private byte[] DeriveKey(byte[] salt)
    {
        var passwordBytes = Encoding.UTF8.GetBytes(_options.Password);
        try
        {
            var key = new byte[DerivedKeySizeBytes];
            Rfc2898DeriveBytes.Pbkdf2(passwordBytes, salt, key, _options.KeyDerivationIterations, HashAlgorithmName.SHA256);
            return key;
        }
        finally
        {
            CryptographicOperations.ZeroMemory(passwordBytes);
        }
    }

    private static async Task WriteJsonAsync<T>(string path, T value, CancellationToken cancellationToken)
    {
        await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, value, JsonOptions, cancellationToken).ConfigureAwait(false);
    }

    // Projects the mutable on-disk record into the immutable public metadata shape.
    private static KmsKeyMetadata ToMetadata(KeyMetadataRecord record)
    {
        var versions = record.Versions
            .Select(v => new KmsKeyVersionMetadata(
                v.VersionId,
                v.State,
                v.CreatedAt,
                v.DeactivatedAt,
                v.PublicKey,
                v.CurveName))
            .ToImmutableArray();

        var createdAt = record.CreatedAt ?? (versions.Length > 0 ? versions.Min(v => v.CreatedAt) : DateTimeOffset.UtcNow);
        return new KmsKeyMetadata(record.KeyId, record.Algorithm, record.State, createdAt, versions);
    }

    // On-disk shape of metadata.json.
    private sealed class KeyMetadataRecord
    {
        public string KeyId { get; set; } = string.Empty;
        public string Algorithm { get; set; } = KmsAlgorithms.Es256;
        public KmsKeyState State { get; set; } = KmsKeyState.Active;
        public DateTimeOffset? CreatedAt { get; set; }
        public string? ActiveVersion { get; set; }
        public List<KeyVersionRecord> Versions { get; set; } = new();
    }

    // One entry per key version inside metadata.json.
    private sealed class KeyVersionRecord
    {
        public string VersionId { get; set; } = string.Empty;
        public KmsKeyState State { get; set; } = KmsKeyState.Active;
        public DateTimeOffset CreatedAt { get; set; }
        public DateTimeOffset? DeactivatedAt { get; set; }
        public string PublicKey { get; set; } = string.Empty;
        public string FileName { get; set; } = string.Empty;
        public string CurveName { get; set; } = string.Empty;
    }

    // On-disk shape of {versionId}.key.json (all fields base64).
    private sealed record KeyEnvelope(
        string Ciphertext,
        string Nonce,
        string Tag,
        string Salt);

    // Freshly generated key material before it is persisted.
    private sealed record EcdsaKeyData(byte[] PrivateBlob, string PublicKey, string Curve);

    // Decrypted private key payload (all fields base64).
    private sealed class EcdsaPrivateKeyRecord
    {
        public string Curve { get; set; } = string.Empty;
        public string D { get; set; } = string.Empty;
        public string Qx { get; set; } = string.Empty;
        public string Qy { get; set; } = string.Empty;
    }

    private static ECCurve ResolveCurve(string curveName) => curveName switch
    {
        "nistP256" or "P-256" or "ES256" => ECCurve.NamedCurves.nistP256,
        _ => throw new NotSupportedException($"Curve '{curveName}' is not supported."),
    };

    public void Dispose() => _mutex.Dispose();
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Options for the <see cref="FileKmsClient"/>.
/// </summary>
public sealed class FileKmsOptions
{
    /// <summary>
    /// Root directory for storing key material; created on client construction
    /// if it does not exist. Must be non-empty.
    /// </summary>
    public string RootPath { get; set; } = string.Empty;

    /// <summary>
    /// Password used to encrypt private key material at rest
    /// (PBKDF2-derived AES-GCM envelope key). Must be non-empty.
    /// </summary>
    public required string Password { get; set; }

    /// <summary>
    /// Signing algorithm identifier. Defaults to ES256, which is the only
    /// algorithm the file driver supports.
    /// </summary>
    public string Algorithm { get; set; } = KmsAlgorithms.Es256;

    /// <summary>
    /// PBKDF2 iteration count for envelope encryption (default 100,000).
    /// </summary>
    public int KeyDerivationIterations { get; set; } = 100_000;
}

View File

@@ -0,0 +1,51 @@
using System.Threading;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Provides signing key operations backed by a key management system (KMS).
/// </summary>
public interface IKmsClient
{
    /// <summary>
    /// Signs the supplied payload with the specified key. When <paramref name="keyVersion"/>
    /// is null, implementations resolve the current active version (the file driver
    /// hashes the payload with SHA-256 before signing).
    /// </summary>
    Task<KmsSignResult> SignAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a signature produced by <see cref="SignAsync"/> using the stored public key.
    /// </summary>
    Task<bool> VerifyAsync(
        string keyId,
        string? keyVersion,
        ReadOnlyMemory<byte> data,
        ReadOnlyMemory<byte> signature,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves metadata for the key and all of its versions.
    /// </summary>
    Task<KmsKeyMetadata> GetMetadataAsync(string keyId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports the key material (including the private scalar) required for local
    /// verification and registration with in-process crypto providers.
    /// </summary>
    Task<KmsKeyMaterial> ExportAsync(
        string keyId,
        string? keyVersion,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Generates a new active key version for the specified key
    /// (the file driver creates the key when it does not yet exist).
    /// </summary>
    Task<KmsKeyMetadata> RotateAsync(string keyId, CancellationToken cancellationToken = default);

    /// <summary>
    /// Revokes a key and all of its versions, preventing future signing operations.
    /// </summary>
    Task RevokeAsync(string keyId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,9 @@
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Supported algorithm identifiers for the KMS abstraction.
/// </summary>
public static class KmsAlgorithms
{
    /// <summary>ECDSA over NIST P-256 with SHA-256 (JOSE "ES256").</summary>
    public const string Es256 = "ES256";
}

View File

@@ -0,0 +1,120 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Crypto provider that delegates signing operations to a KMS backend.
/// Keys must be registered via <see cref="UpsertSigningKey"/> before a signer
/// can be obtained; registrations are keyed case-insensitively by key id.
/// </summary>
public sealed class KmsCryptoProvider : ICryptoProvider
{
    private readonly IKmsClient _kmsClient;
    private readonly ConcurrentDictionary<string, KmsSigningRegistration> _registrations = new(StringComparer.OrdinalIgnoreCase);

    public KmsCryptoProvider(IKmsClient kmsClient)
    {
        _kmsClient = kmsClient ?? throw new ArgumentNullException(nameof(kmsClient));
    }

    /// <summary>Provider name used in key references.</summary>
    public string Name => "kms";

    /// <summary>
    /// Only ES256 signing/verification is supported; any other algorithm or
    /// capability is rejected.
    /// </summary>
    public bool Supports(CryptoCapability capability, string algorithmId)
        => string.Equals(algorithmId, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase)
           && capability is CryptoCapability.Signing or CryptoCapability.Verification;

    /// <summary>Password hashing is not a KMS capability.</summary>
    public IPasswordHasher GetPasswordHasher(string algorithmId)
        => throw new InvalidOperationException($"Provider '{Name}' does not support password hashing.");

    /// <summary>
    /// Returns a signer bound to the registered key id/version.
    /// </summary>
    /// <exception cref="InvalidOperationException">Algorithm not supported.</exception>
    /// <exception cref="KeyNotFoundException">Key id was never registered.</exception>
    public ICryptoSigner GetSigner(string algorithmId, CryptoKeyReference keyReference)
    {
        ArgumentNullException.ThrowIfNull(keyReference);

        if (!Supports(CryptoCapability.Signing, algorithmId))
        {
            throw new InvalidOperationException($"Signing algorithm '{algorithmId}' is not supported by provider '{Name}'.");
        }

        return _registrations.TryGetValue(keyReference.KeyId, out var registration)
            ? new KmsSigner(_kmsClient, registration)
            : throw new KeyNotFoundException($"Signing key '{keyReference.KeyId}' is not registered with provider '{Name}'.");
    }

    /// <summary>
    /// Registers (or replaces) a signing key. The key must be ES256 and carry the
    /// KMS version id in its metadata under <see cref="KmsMetadataKeys.Version"/>.
    /// </summary>
    public void UpsertSigningKey(CryptoSigningKey signingKey)
    {
        ArgumentNullException.ThrowIfNull(signingKey);

        if (!string.Equals(signingKey.AlgorithmId, KmsAlgorithms.Es256, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException($"Provider '{Name}' only supports {KmsAlgorithms.Es256} signing keys.");
        }

        string? versionId = null;
        var hasVersion = signingKey.Metadata is not null
            && signingKey.Metadata.TryGetValue(KmsMetadataKeys.Version, out versionId)
            && !string.IsNullOrWhiteSpace(versionId);
        if (!hasVersion)
        {
            throw new InvalidOperationException("KMS signing keys must include metadata entry 'kms.version'.");
        }

        var keyId = signingKey.Reference.KeyId;
        _registrations[keyId] = new KmsSigningRegistration(keyId, versionId!, signingKey.AlgorithmId);
    }

    /// <summary>Removes a registration; returns false for blank or unknown ids.</summary>
    public bool RemoveSigningKey(string keyId)
        => !string.IsNullOrWhiteSpace(keyId) && _registrations.TryRemove(keyId, out _);

    /// <summary>
    /// Materializes a <see cref="CryptoSigningKey"/> for every registration by exporting
    /// key material from the KMS. The interface is synchronous, so the async export is
    /// blocked on here.
    /// </summary>
    public IReadOnlyCollection<CryptoSigningKey> GetSigningKeys()
    {
        var keys = new List<CryptoSigningKey>();
        foreach (var entry in _registrations.Values)
        {
            var material = _kmsClient.ExportAsync(entry.KeyId, entry.VersionId).GetAwaiter().GetResult();

            var ecParameters = new ECParameters
            {
                Curve = ECCurve.NamedCurves.nistP256,
                D = material.D,
                Q = new ECPoint
                {
                    X = material.Qx,
                    Y = material.Qy,
                },
            };

            var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
            {
                [KmsMetadataKeys.Version] = material.VersionId,
            };

            keys.Add(new CryptoSigningKey(
                new CryptoKeyReference(material.KeyId, Name),
                material.Algorithm,
                in ecParameters,
                material.CreatedAt,
                metadata: metadata));
        }

        return keys;
    }

    internal static class KmsMetadataKeys
    {
        public const string Version = "kms.version";
    }
}
/// <summary>
/// Immutable registration entry pairing a key id with the KMS key version and
/// algorithm used when constructing a <see cref="KmsSigner"/>.
/// </summary>
internal sealed record KmsSigningRegistration(string KeyId, string VersionId, string Algorithm);

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Represents exported key material for verification and registration.
/// Includes the private scalar; callers should zero <see cref="D"/> once done.
/// </summary>
/// <param name="KeyId">Logical key identifier.</param>
/// <param name="VersionId">Key version the material belongs to.</param>
/// <param name="Algorithm">Signing algorithm identifier (e.g. ES256).</param>
/// <param name="Curve">Named elliptic curve (e.g. "nistP256").</param>
/// <param name="D">Private scalar bytes — sensitive material.</param>
/// <param name="Qx">Public point X coordinate.</param>
/// <param name="Qy">Public point Y coordinate.</param>
/// <param name="CreatedAt">Timestamp the key version was created.</param>
public sealed record KmsKeyMaterial(
    string KeyId,
    string VersionId,
    string Algorithm,
    string Curve,
    byte[] D,
    byte[] Qx,
    byte[] Qy,
    DateTimeOffset CreatedAt);

View File

@@ -0,0 +1,24 @@
using System.Collections.Immutable;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Describes a logical KMS key and its versions.
/// </summary>
/// <param name="KeyId">Logical key identifier.</param>
/// <param name="Algorithm">Signing algorithm identifier (e.g. ES256).</param>
/// <param name="State">Lifecycle state of the key as a whole.</param>
/// <param name="CreatedAt">When the key was first created.</param>
/// <param name="Versions">All known versions, including inactive and revoked ones.</param>
public sealed record KmsKeyMetadata(
    string KeyId,
    string Algorithm,
    KmsKeyState State,
    DateTimeOffset CreatedAt,
    ImmutableArray<KmsKeyVersionMetadata> Versions);
/// <summary>
/// Describes a specific key version.
/// </summary>
/// <param name="VersionId">Unique version identifier within the key.</param>
/// <param name="State">Lifecycle state of this version.</param>
/// <param name="CreatedAt">When the version was created.</param>
/// <param name="DeactivatedAt">When the version was revoked, if ever.</param>
/// <param name="PublicKey">Public key material (base64; no private data).</param>
/// <param name="Curve">Named elliptic curve (e.g. "nistP256").</param>
public sealed record KmsKeyVersionMetadata(
    string VersionId,
    KmsKeyState State,
    DateTimeOffset CreatedAt,
    DateTimeOffset? DeactivatedAt,
    string PublicKey,
    string Curve);

View File

@@ -0,0 +1,11 @@
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Represents the lifecycle state of a KMS key or key version.
/// </summary>
public enum KmsKeyState
{
    /// <summary>Usable for signing; the current version of a key.</summary>
    Active = 0,

    /// <summary>Superseded by a newer version during rotation; still usable for verification.</summary>
    PendingRotation = 1,

    /// <summary>Permanently disabled; signing and rotation are rejected.</summary>
    Revoked = 2,
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Represents the output of a signing operation.
/// </summary>
/// <param name="KeyId">Key that produced the signature.</param>
/// <param name="VersionId">Exact key version used, even when resolved implicitly.</param>
/// <param name="Algorithm">Signing algorithm identifier (e.g. ES256).</param>
/// <param name="Signature">Raw signature bytes.</param>
public sealed record KmsSignResult(
    string KeyId,
    string VersionId,
    string Algorithm,
    byte[] Signature);

View File

@@ -0,0 +1,55 @@
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Cryptography;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// <see cref="ICryptoSigner"/> adapter that forwards sign/verify calls to an
/// <see cref="IKmsClient"/> for a fixed key id + version registration.
/// </summary>
internal sealed class KmsSigner : ICryptoSigner
{
    private readonly IKmsClient _client;
    private readonly string _keyId;
    private readonly string _versionId;
    private readonly string _algorithm;

    public KmsSigner(IKmsClient client, KmsSigningRegistration registration)
    {
        // Guard at construction so a misconfigured provider fails fast rather than
        // throwing NullReferenceException on first use.
        ArgumentNullException.ThrowIfNull(client);
        ArgumentNullException.ThrowIfNull(registration);

        _client = client;
        _keyId = registration.KeyId;
        _versionId = registration.VersionId;
        _algorithm = registration.Algorithm;
    }

    public string KeyId => _keyId;

    public string AlgorithmId => _algorithm;

    /// <summary>Signs <paramref name="data"/> via the KMS and returns the raw signature bytes.</summary>
    public async ValueTask<byte[]> SignAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
    {
        var result = await _client.SignAsync(_keyId, _versionId, data, cancellationToken).ConfigureAwait(false);
        return result.Signature;
    }

    /// <summary>Verifies <paramref name="signature"/> over <paramref name="data"/> via the KMS.</summary>
    public ValueTask<bool> VerifyAsync(ReadOnlyMemory<byte> data, ReadOnlyMemory<byte> signature, CancellationToken cancellationToken = default)
        => new(_client.VerifyAsync(_keyId, _versionId, data, signature, cancellationToken));

    /// <summary>
    /// Exports the public portion of the key as a P-256 JWK suitable for JWKS documents.
    /// </summary>
    public JsonWebKey ExportPublicJsonWebKey()
    {
        // NOTE(review): sync-over-async is forced by the synchronous ICryptoSigner contract.
        var material = _client.ExportAsync(_keyId, _versionId).GetAwaiter().GetResult();
        try
        {
            var jwk = new JsonWebKey
            {
                Kid = material.KeyId,
                Alg = material.Algorithm,
                Kty = JsonWebAlgorithmsKeyTypes.EllipticCurve,
                Use = JsonWebKeyUseNames.Sig,
                Crv = JsonWebKeyECTypes.P256,
                X = Base64UrlEncoder.Encode(material.Qx),
                Y = Base64UrlEncoder.Encode(material.Qy),
            };

            jwk.KeyOps.Add("sign");
            jwk.KeyOps.Add("verify");
            return jwk;
        }
        finally
        {
            // ExportAsync also returns the private scalar; only the public coordinates
            // are needed here, so wipe D instead of leaving it live on the heap.
            CryptographicOperations.ZeroMemory(material.D);
        }
    }
}

View File

@@ -0,0 +1,32 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Cryptography;
namespace StellaOps.Cryptography.Kms;
/// <summary>
/// Dependency injection helpers for the KMS client and crypto provider.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Registers the file-backed KMS client (singleton) and exposes it through a
    /// <see cref="KmsCryptoProvider"/> added to the <see cref="ICryptoProvider"/> set.
    /// Existing registrations are left untouched (Try-add semantics).
    /// </summary>
    public static IServiceCollection AddFileKms(
        this IServiceCollection services,
        Action<FileKmsOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);

        // Static lambda: the factory captures nothing, options are resolved per container.
        services.TryAddSingleton<IKmsClient>(static provider =>
            new FileKmsClient(provider.GetRequiredService<IOptions<FileKmsOptions>>().Value));

        services.TryAddEnumerable(ServiceDescriptor.Singleton<ICryptoProvider, KmsCryptoProvider>());
        return services;
    }
}

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -3,7 +3,7 @@
## Sprint 72 – Abstractions & File Driver ## Sprint 72 – Abstractions & File Driver
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | | ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------| |----|--------|----------|------------|-------------|---------------|
| KMS-72-001 | TODO | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes. | | KMS-72-001 | DOING (2025-10-29) | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes.<br>2025-10-29: `FileKmsClient` (ES256) file driver scaffolding committed under `StellaOps.Cryptography.Kms`; includes disk encryption + unit tests. Follow-up: address PBKDF2/AesGcm warnings and wire into Authority services. |
| KMS-72-002 | TODO | KMS Guild | KMS-72-001 | Add CLI support for importing/exporting file-based keys with password protection. | CLI commands functional; docs updated; integration tests pass. | | KMS-72-002 | TODO | KMS Guild | KMS-72-001 | Add CLI support for importing/exporting file-based keys with password protection. | CLI commands functional; docs updated; integration tests pass. |
## Sprint 73 – Cloud & HSM Integration ## Sprint 73 – Cloud & HSM Integration

View File

@@ -0,0 +1,112 @@
using System.Security.Cryptography;
using StellaOps.Cryptography.Kms;
namespace StellaOps.Cryptography.Kms.Tests;
/// <summary>
/// Exercises the file-backed KMS client: rotate/sign/verify lifecycle, revocation,
/// and private-key export. Each instance works in its own temp directory.
/// </summary>
public sealed class FileKmsClientTests : IDisposable
{
    // Unique temp directory per test-class instance so parallel runs never collide.
    private readonly string _rootPath;

    public FileKmsClientTests()
    {
        _rootPath = Path.Combine(Path.GetTempPath(), $"kms-tests-{Guid.NewGuid():N}");
    }

    [Fact]
    public async Task RotateSignVerifyLifecycle_Works()
    {
        using var client = CreateClient();
        var keyId = "kms-test-key";

        // Initial rotate creates the key with a single active version.
        var metadata = await client.RotateAsync(keyId);
        Assert.Equal(keyId, metadata.KeyId);
        Assert.Equal(KmsKeyState.Active, metadata.State);

        // Assert.Single both checks the count and yields the sole element.
        var version = Assert.Single(metadata.Versions);
        Assert.Equal(KmsKeyState.Active, version.State);

        var firstData = RandomNumberGenerator.GetBytes(256);
        var firstSignature = await client.SignAsync(keyId, null, firstData);
        Assert.Equal(keyId, firstSignature.KeyId);
        Assert.Equal(KmsAlgorithms.Es256, firstSignature.Algorithm);
        Assert.True(await client.VerifyAsync(keyId, firstSignature.VersionId, firstData, firstSignature.Signature));

        // Rotate again: the highest-numbered version becomes active, the old one pends rotation.
        var rotated = await client.RotateAsync(keyId);
        Assert.Equal(2, rotated.Versions.Length);
        var activeVersion = rotated.Versions.Single(v => v.State == KmsKeyState.Active);
        Assert.Equal(rotated.Versions.Max(v => v.VersionId), activeVersion.VersionId);
        var previousVersion = rotated.Versions.Single(v => v.State != KmsKeyState.Active);
        Assert.Equal(KmsKeyState.PendingRotation, previousVersion.State);

        // Signing without an explicit version must use the newly active version.
        var newData = RandomNumberGenerator.GetBytes(128);
        var activeSignature = await client.SignAsync(keyId, null, newData);
        Assert.Equal(activeVersion.VersionId, activeSignature.VersionId);
        Assert.True(await client.VerifyAsync(keyId, null, newData, activeSignature.Signature));

        // Explicit-version verify should still pass for the previous version's old signature.
        Assert.True(await client.VerifyAsync(keyId, previousVersion.VersionId, firstData, firstSignature.Signature));
    }

    [Fact]
    public async Task RevokePreventsSigning()
    {
        using var client = CreateClient();
        var keyId = "kms-revoke";
        await client.RotateAsync(keyId);
        await client.RevokeAsync(keyId);

        // Revocation must cascade to the key and every version.
        var metadata = await client.GetMetadataAsync(keyId);
        Assert.Equal(KmsKeyState.Revoked, metadata.State);
        Assert.All(metadata.Versions, v => Assert.Equal(KmsKeyState.Revoked, v.State));

        // Signing with a revoked key must fail loudly.
        var data = RandomNumberGenerator.GetBytes(32);
        await Assert.ThrowsAsync<InvalidOperationException>(() => client.SignAsync(keyId, null, data));
    }

    [Fact]
    public async Task ExportAsync_ReturnsKeyMaterial()
    {
        using var client = CreateClient();
        var keyId = "kms-export";
        await client.RotateAsync(keyId);

        // Export surfaces both public coordinates and the private scalar for ES256/P-256.
        var material = await client.ExportAsync(keyId, null);
        Assert.Equal(keyId, material.KeyId);
        Assert.Equal(KmsAlgorithms.Es256, material.Algorithm);
        Assert.Equal("nistP256", material.Curve);
        Assert.NotEmpty(material.D);
        Assert.NotEmpty(material.Qx);
        Assert.NotEmpty(material.Qy);
    }

    // Builds a client rooted at the per-test temp directory with a fixed test password.
    private FileKmsClient CreateClient()
    {
        var options = new FileKmsOptions
        {
            RootPath = _rootPath,
            Password = "P@ssw0rd!",
            Algorithm = KmsAlgorithms.Es256,
        };
        return new FileKmsClient(options);
    }

    public void Dispose()
    {
        try
        {
            if (Directory.Exists(_rootPath))
            {
                Directory.Delete(_rootPath, recursive: true);
            }
        }
        catch
        {
            // Best-effort cleanup: never throw from Dispose; a leaked temp dir is harmless.
        }
    }
}

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.Cryptography.Kms/StellaOps.Cryptography.Kms.csproj" />
</ItemGroup>
</Project>